Commit b0be9975 authored by Andrey Filippov

Debugging will keep this (broken) branch

parent 54c5427a
@@ -51,7 +51,8 @@
<groupId>net.imagej</groupId>
<artifactId>ij</artifactId>
<!-- <version>1.52e</version> -->
- <version>1.53j</version>
+ <!-- <version>1.53j</version> -->
+ <version>1.54f</version>
</dependency>
<dependency>
<groupId>org.jcuda</groupId>
...
@@ -278,7 +278,7 @@ public class ImageDtt extends ImageDttCPU {
threadsMax); // final int threadsMax, // maximal number of threads to launch
if (tp_tasks.length == 0) {
- System.out.println("Empty tasks - nothing to do");
+ System.out.println("Empty tasks - nothing to do 1`");
return;
}
//texture_tiles
...
@@ -4833,6 +4833,7 @@ public class OpticalFlow {
final boolean updateStatus,
final int debugLevel) throws Exception
{
+ boolean render_to_scene = false; // testing separate distortion mode
int earliest_scene = 0; // increase on failure
boolean build_ref_dsi = clt_parameters.imp.force_ref_dsi;
@@ -5210,7 +5211,7 @@ public class OpticalFlow {
}
// Calculate and fill per-scene target disparities as scene.dsrbg
double [] ref_target_disparity=quadCLTs[ref_index].getDLS()[0]; // 0];
- double [][] interpolated_disparities = intepolateSceneDisparity(
+ double [][] interpolated_disparities = intepolateScenesDisparity(
clt_parameters, // final CLTParameters clt_parameters,
quadCLTs, // final QuadCLT [] scenes,
ref_index, // final int indx_ref,
@@ -5473,6 +5474,9 @@ public class OpticalFlow {
if (views[ibase][2] != 0) {
scenes_suffix += "-Z"+String.format("%.0f",views[ibase][2]);
}
+ if (render_to_scene) {
+ scenes_suffix += "-TO_SCENE";
+ }
if (generate_mapped) {
double [][] ds_vantage = new double[][] {selected_disparity,selected_strength};
if ((views[ibase][0] != 0) || (views[ibase][1] != 0) || (views[ibase][2] != 0)) {
@@ -5485,18 +5489,33 @@ public class OpticalFlow {
quadCLTs[ref_index], // final QuadCLT reference_QuadClt,
8); // iscale); // final int iscale);
}
- imp_scenes_pair[nstereo]= renderSceneSequence(
- clt_parameters, // CLTParameters clt_parameters,
- fov_tiles, // Rectangle fov_tiles,
- mode3d, // int mode3d,
- toRGB, // boolean toRGB,
- xyz_offset, // double [] stereo_offset, // offset reference camera {x,y,z}
- sensor_mask, // int sensor_mask,
- scenes_suffix, // String suffix,
- ds_vantage[0], // selected_disparity, // double [] ref_disparity,
- quadCLTs, // QuadCLT [] quadCLTs,
- threadsMax, // int threadsMax,
- debugLevel); // int debugLevel);
+ if (render_to_scene) {
+ imp_scenes_pair[nstereo]= renderSceneSequenceNonTile(
+ clt_parameters, // CLTParameters clt_parameters,
+ fov_tiles, // Rectangle fov_tiles,
+ mode3d, // int mode3d,
+ toRGB, // boolean toRGB,
+ xyz_offset, // double [] stereo_offset, // offset reference camera {x,y,z}
+ sensor_mask, // int sensor_mask,
+ scenes_suffix, // String suffix,
+ ds_vantage[0], // selected_disparity, // double [] ref_disparity,
+ quadCLTs, // QuadCLT [] quadCLTs,
+ threadsMax, // int threadsMax,
+ debugLevel); // int debugLevel);
+ } else {
+ imp_scenes_pair[nstereo]= renderSceneSequence(
+ clt_parameters, // CLTParameters clt_parameters,
+ fov_tiles, // Rectangle fov_tiles,
+ mode3d, // int mode3d,
+ toRGB, // boolean toRGB,
+ xyz_offset, // double [] stereo_offset, // offset reference camera {x,y,z}
+ sensor_mask, // int sensor_mask,
+ scenes_suffix, // String suffix,
+ ds_vantage[0], // selected_disparity, // double [] ref_disparity,
+ quadCLTs, // QuadCLT [] quadCLTs,
+ threadsMax, // int threadsMax,
+ debugLevel); // int debugLevel);
+ }
if (save_mapped_mono_color[col_mode]) {
quadCLTs[ref_index].saveImagePlusInModelDirectory(
null, // "GPU-SHIFTED-D"+clt_parameters.disparity, // String suffix,
@@ -6860,6 +6879,7 @@ public class OpticalFlow {
String suffix_in,
double [] ref_disparity,
QuadCLT [] quadCLTs,
+ // boolean render_to_scene,
int threadsMax,
int debugLevel) {
boolean corr_raw_ers = true;
@@ -7049,7 +7069,238 @@ public class OpticalFlow {
return imp_scenes;
}
// Trying non-Tile image warping - first render to scene coordinates, then apply warping.
// This method handles disparity discontinuities poorly - it is primarily intended to get a smooth ground surface from a drone.
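// Outline of the code below: for each scene, get its pose (XYZ/ATR) from the reference scene ErsCorrection data,
// interpolate the reference disparity onto that scene's own uniform tile grid (interpolateSingleSceneDisparity()),
// project it with transformToScenePxPyD(), optionally build motion-blur vectors (getMotionBlur()),
// render with QuadCLT.renderGPUFromDSI() using zero scene offsets, and append the rendered channels
// to an ImageStack; an optional unsharp mask is applied to monochrome output at the end.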
public static ImagePlus renderSceneSequenceNonTile(
CLTParameters clt_parameters,
Rectangle fov_tiles,
int mode3d, // for older compatibility mode3d = -1 for RAW, 0 - INF, 1 - FG, 2 BG
boolean toRGB,
double [] stereo_xyz, // offset reference camera {x,y,z}
int sensor_mask,
String suffix_in,
double [] ref_disparity,
QuadCLT [] quadCLTs,
int threadsMax,
int debugLevel) {
int dbg_scene = 1;
boolean corr_raw_ers = true;
boolean filter_bg = false; // to start with
// FIXME: Move to clt_parameters;
final double max_overlap = 0.6;
final double min_adisp_cam = 0.2;
final double min_rdisp_cam = 0.03;
double [] stereo_atr = ZERO3; // maybe later play with rotated camera
boolean um_mono = clt_parameters.imp.um_mono;
double um_sigma = clt_parameters.imp.um_sigma;
double um_weight = clt_parameters.imp.um_weight;
boolean mb_en = clt_parameters.imp.mb_en && (fov_tiles==null) && (mode3d > 0);
double mb_tau = clt_parameters.imp.mb_tau; // 0.008; // time constant, sec
double mb_max_gain = clt_parameters.imp.mb_max_gain; // 5.0; // motion blur maximal gain (if more - move second point more than a pixel
final float fum_weight = (float) um_weight;
boolean merge_all = clt_parameters.imp.merge_all;
if (mode3d < 1) {
merge_all = false;
}
if (merge_all) {
sensor_mask = 1;
}
String suffix = suffix_in+((mode3d > 0)?(merge_all?"-MERGED":"-SINGLE"):"");
if ((mode3d <0) && (corr_raw_ers)) {
suffix+="-RAW_ERS";
}
int ref_index = quadCLTs.length -1;
int num_sens = quadCLTs[ref_index].getNumSensors();
ErsCorrection ers_reference = quadCLTs[ref_index].getErsCorrection();
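// collect the indices of the sensor channels enabled by sensor_mask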
int num_used_sens = 0;
for (int i = 0; i < num_sens; i++) if (((sensor_mask >> i) & 1) != 0) num_used_sens++;
int [] channels = new int [num_used_sens];
int nch = 0;
for (int i = 0; i < num_sens; i++) if (((sensor_mask >> i) & 1) != 0) channels[nch++] = i;
ImageStack stack_scenes = null;
double [][] ref_pXpYD = transformToScenePxPyD( // now should work with offset ref_scene
fov_tiles, // final Rectangle [] extra_woi, // show larger than sensor WOI (or null)
ref_disparity, // final double [] disparity_ref, // invalid tiles - NaN in disparity
ZERO3, // final double [] scene_xyz, // camera center in world coordinates
ZERO3, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[ref_index], // final QuadCLT scene_QuadClt,
quadCLTs[ref_index], // final QuadCLT reference_QuadClt, // now - may be null - for testing if scene is rotated ref
threadsMax); // int threadsMax)
for (int nscene = 0; nscene < quadCLTs.length ; nscene++) if (quadCLTs[nscene] != null){
if (nscene== dbg_scene) {
System.out.println("renderSceneSequence(): nscene = "+nscene);
}
String ts = quadCLTs[nscene].getImageName();
double [] scene_xyz = ZERO3;
double [] scene_atr = ZERO3;
if (nscene != ref_index) { // Check even for raw, so video frames will match in all modes
scene_xyz = ers_reference.getSceneXYZ(ts);
scene_atr = ers_reference.getSceneATR(ts);
if ((scene_atr==null) || (scene_xyz == null)) {
continue;
}
if ((mode3d >= 0) || corr_raw_ers) {
double [] scene_ers_xyz_dt = ers_reference.getSceneErsXYZ_dt(ts);
double [] scene_ers_atr_dt = ers_reference.getSceneErsATR_dt(ts);
quadCLTs[nscene].getErsCorrection().setErsDt(
scene_ers_xyz_dt, // double [] ers_xyz_dt,
scene_ers_atr_dt); // double [] ers_atr_dt)(ers_scene_original_xyz_dt);
if (mode3d < 0) { // velocities != 0, but offset=0
scene_xyz = ZERO3;
scene_atr = ZERO3;
}
} else { // ugly, restore for raw mode that should not be rotated/shifted
scene_xyz = ZERO3;
scene_atr = ZERO3;
}
}
if (stereo_xyz != null) { // offset all, including reference scene
double [][] combo_xyzatr = ErsCorrection.combineXYZATR(
stereo_xyz, // double [] reference_xyz,
stereo_atr, // double [] reference_atr,
scene_xyz, // double [] scene_xyz,
scene_atr); // double [] scene_atr)
scene_xyz = combo_xyzatr[0];
scene_atr = combo_xyzatr[1];
}
int sm = merge_all? -1: sensor_mask;
ImagePlus imp_scene = null;
double [][] dxyzatr_dt = null;
// should get velocities from the HashMap at the reference scene by timestamp, not re-calculate them.
if (mb_en) {
// dxyzatr_dt = getVelocities(
// quadCLTs, // QuadCLT [] quadCLTs,
// nscene); // int nscene)
dxyzatr_dt = new double[][] { // for all, including ref
quadCLTs[nscene].getErsCorrection().getErsXYZ_dt(),
quadCLTs[nscene].getErsCorrection().getErsATR_dt()};
}
// here - difference from renderSceneSequence()
// Is it needed - from intepolateScenesDisparity()
/*
double [] scene_ers_xyz_dt = ers_reference.getSceneErsXYZ_dt(ts);
double [] scene_ers_atr_dt = ers_reference.getSceneErsATR_dt(ts);
scenes[nscene].getErsCorrection().setErsDt(
scene_ers_xyz_dt, // double [] ers_xyz_dt,
scene_ers_atr_dt); // double [] ers_atr_dt)(ers_scene_original_xyz_dt);
//setupERS() will be inside transformToScenePxPyD()
*/
// 1) Calculate disparity referenced to the current scene uniform grid (consider re-measuring it instead?)
double [] disparity_scene = interpolateSingleSceneDisparity(
clt_parameters, // final CLTParameters clt_parameters,
quadCLTs[nscene], // final QuadCLT scene,
ref_disparity, // final double [] disparity_ref,
scene_xyz, // final double [] scene_xyz, // camera center in world coordinates
scene_atr, // final double [] scene_atr, // camera orientation relative to world frame
debugLevel); // final int debug_level);
// add pX, pY of the uniform grid
double [][] pXpYD = transformToScenePxPyD( // full size - [tilesX*tilesY], some nulls
null, // final Rectangle [] extra_woi, // show larger than sensor WOI (or null)
disparity_scene, // dls[0], // final double [] disparity_ref, // invalid tiles - NaN in disparity (maybe it should not be masked by margins?)
ZERO3, // final double [] scene_xyz, // camera center in world coordinates
ZERO3, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[nscene], // final QuadCLT scene_QuadClt,
null); // quadCLTs[nscene]); // final QuadCLT reference_QuadClt)
if (mb_en && (dxyzatr_dt != null)) {
double [][] motion_blur = getMotionBlur(
quadCLTs[nscene], // QuadCLT ref_scene,
quadCLTs[nscene], // QuadCLT scene, // can be the same as ref_scene
pXpYD, // double [][] ref_pXpYD, // here it is scene, not reference!
scene_xyz, // double [] camera_xyz,
scene_atr, // double [] camera_atr,
dxyzatr_dt[0], // double [] camera_xyz_dt,
dxyzatr_dt[1], // double [] camera_atr_dt,
0, // int shrink_gaps, // will gaps, but not more that grow by this
debugLevel); // int debug_level)
imp_scene = QuadCLT.renderGPUFromDSI(
sm, // final int sensor_mask,
merge_all, // final boolean merge_channels,
null, // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
clt_parameters, // CLTParameters clt_parameters,
disparity_scene, // double [] disparity_ref,
// motion blur compensation
mb_tau, // double mb_tau, // 0.008; // time constant, sec
mb_max_gain, // double mb_max_gain, // 5.0; // motion blur maximal gain (if more - move second point more than a pixel
motion_blur, // double [][] mb_vectors, //
ZERO3, // scene_xyz, // final double [] scene_xyz, // camera center in world coordinates
ZERO3, // scene_atr, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[nscene], // final QuadCLT scene,
null, // quadCLTs[nscene], // final QuadCLT ref_scene, // now - may be null - for testing if scene is rotated ref
toRGB, // final boolean toRGB,
(toRGB? clt_parameters.imp.show_color_nan : clt_parameters.imp.show_mono_nan),
"", // String suffix, no suffix here
Double.NaN, // double fill_value, - use instead of image
QuadCLT.THREADS_MAX, // int threadsMax,
debugLevel); // int debugLevel)
} else {
imp_scene = QuadCLT.renderGPUFromDSI(
sm, // final int sensor_mask,
merge_all, // final boolean merge_channels,
fov_tiles, // testr, // null, // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
clt_parameters, // CLTParameters clt_parameters,
ref_disparity, // double [] disparity_ref,
// not used, just as null/not null now
// null means uniform grid, no view transform. even with 0 rot ERS was changing results
ZERO3, // ((!corr_raw_ers && (mode3d<0))? null:scene_xyz), // final double [] scene_xyz, // camera center in world coordinates
ZERO3, // ((!corr_raw_ers && (mode3d<0))? null:scene_atr), // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[nscene], // final QuadCLT scene,
null, // quadCLTs[nscene], // final QuadCLT ref_scene, // now - may be null - for testing if scene is rotated ref
toRGB, // final boolean toRGB,
(toRGB? clt_parameters.imp.show_color_nan : clt_parameters.imp.show_mono_nan),
"", // String suffix, no suffix here
Double.NaN, // double fill_value,// - use instead of image
QuadCLT.THREADS_MAX, // int threadsMax,
debugLevel); // int debugLevel)
}
if (stack_scenes == null) {
stack_scenes = new ImageStack(imp_scene.getWidth(),imp_scene.getHeight());
}
for (int i = 0; i < channels.length; i++) {
stack_scenes.addSlice(
ts+"-"+channels[i],
imp_scene.getStack().getPixels(i+1));
}
}
// Apply unsharp mask here, in parallel
if (um_mono && !toRGB) {
final ImageStack fstack_scenes = stack_scenes;
final int nSlices = fstack_scenes.getSize();
final Thread[] threads = ImageDtt.newThreadArray(QuadCLT.THREADS_MAX);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nSlice = ai.getAndIncrement(); nSlice < nSlices; nSlice = ai.getAndIncrement()) {
FloatProcessor fp = (FloatProcessor) fstack_scenes.getProcessor(nSlice+1);
float [] fpixels = (float[]) fstack_scenes.getPixels(nSlice+1);
float [] fpixels_orig = fpixels.clone();
(new GaussianBlur()).blurFloat(
fp, // FloatProcessor ip,
um_sigma, // double sigmaX,
um_sigma, // double sigmaY,
0.01); // double accuracy)
for (int i = 0; i < fpixels.length; i++) {
fpixels[i] = fpixels_orig[i] - fum_weight * fpixels[i];
}
}
}
};
}
ImageDtt.startAndJoin(threads);
}
ImagePlus imp_scenes = new ImagePlus(suffix, stack_scenes);
imp_scenes.getProcessor().resetMinAndMax();
return imp_scenes;
}
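// The unsharp-mask pass above computes out[i] = in[i] - um_weight * blur_sigma(in)[i] for each mono slice.
// A minimal stand-alone sketch of that per-pixel update (hypothetical helper, not part of OpticalFlow;
// it assumes the Gaussian-blurred copy was produced separately, e.g. by ImageJ's GaussianBlur as above):
static float [] unsharpMono(
float [] pixels_orig, // original slice pixels
float [] pixels_blur, // the same slice after GaussianBlur.blurFloat(fp, um_sigma, um_sigma, 0.01)
float um_weight) { // clt_parameters.imp.um_weight
float [] out = new float [pixels_orig.length];
for (int i = 0; i < out.length; i++) {
out[i] = pixels_orig[i] - um_weight * pixels_blur[i]; // same update as in the parallel loop above
}
return out;
}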
public static double [][] getSceneSZXY(
QuadCLT scene,
@@ -10571,13 +10822,12 @@ public class OpticalFlow {
return combo_dsn;
}
- public static double [][] intepolateSceneDisparity(
+ public static double [][] intepolateScenesDisparity(
final CLTParameters clt_parameters,
final QuadCLT [] scenes,
final int indx_ref,
final double [] disparity_ref, // disparity in the reference view tiles (Double.NaN - invalid)
- final int debug_level
- )
+ final int debug_level)
{
final int scene_extrap_irad = 1;
final double scene_extrap_rad = scene_extrap_irad + 0.5;
@@ -10610,8 +10860,6 @@ public class OpticalFlow {
// }
} else {
- final Matrix [][] scene_approx = new Matrix[disparity_ref.length][];
- Arrays.fill(disparity_scenes[nscene], Double.NaN);
double [] scene_xyz = ers_reference.getSceneXYZ(ts);
double [] scene_atr = ers_reference.getSceneATR(ts);
if ((scene_xyz == null) || (scene_atr == null)){
@@ -10623,6 +10871,19 @@ public class OpticalFlow {
scene_ers_xyz_dt, // double [] ers_xyz_dt,
scene_ers_atr_dt); // double [] ers_atr_dt)(ers_scene_original_xyz_dt);
//setupERS() will be inside transformToScenePxPyD()
// Testing interpolateSingleSceneDisparity() instead of the commented-out code below
disparity_scenes[nscene] = interpolateSingleSceneDisparity(
clt_parameters, // final CLTParameters clt_parameters,
scenes[nscene], // final QuadCLT scene,
disparity_ref, // final double [] disparity_ref,
scene_xyz, // final double [] scene_xyz, // camera center in world coordinates
scene_atr, // final double [] scene_atr, // camera orientation relative to world frame
debug_level); // final int debug_level);
/*
final Matrix [][] scene_approx = new Matrix[disparity_ref.length][];
Arrays.fill(disparity_scenes[nscene], Double.NaN);
double [][] scene_pXpYD_prefilter = transformToScenePxPyD( // will be null for disparity == NaN, total size - tilesX*tilesY
null, // final Rectangle [] extra_woi, // show larger than sensor WOI (or null)
disparity_ref, // final double [] disparity_ref, // invalid tiles - NaN in disparity (maybe it should not be masked by margins?)
@@ -10706,19 +10967,19 @@ public class OpticalFlow {
B[0][0] += dsxd;
B[1][0] += dsyd;
B[2][0] += dsd;
- /* ax + by + c ~= d
- a * sx2 + b * sxy + c * sx - sxd = 0
- a * sxy + b * sy2 + c * sy - syd = 0
- a * sx + b * sy + c * s0 - sd = 0
- | sx2 sxy sx | | a | | sxd |
- | sxy sy2 sy | * | b | = | syd |
- | sx sy s0 | | c | | sd | */
+ // ax + by + c ~= d
+ // a * sx2 + b * sxy + c * sx - sxd = 0
+ // a * sxy + b * sy2 + c * sy - syd = 0
+ // a * sx + b * sy + c * s0 - sd = 0
+ // | sx2 sxy sx | | a | | sxd |
+ // | sxy sy2 sy | * | b | = | syd |
+ // | sx sy s0 | | c | | sd |
}
}
}
}
}
- }
+ } // for (int nTile = 0;...
for (int nTile = 0; nTile < scene_pXpYD.length; nTile++) if (scene_approx[nTile] != null) {
if (debug_level > -1) {
int tileY = nTile / tilesX;
@@ -10740,8 +11001,9 @@ public class OpticalFlow {
disparity_scenes[nscene][nTile] = scene_approx[nTile][1].get(2,0) / A[2][2]; // Double.NaN;
}
}
+ } // for (int nTile = 0;...
- }
+ */ // end of replaced code
}
if (debug_level > -1) {
if (nscene == indx_ref) {
@@ -10762,6 +11024,151 @@ public class OpticalFlow {
return disparity_scenes;
}
public static double [] interpolateSingleSceneDisparity(
final CLTParameters clt_parameters,
final QuadCLT scene,
final double [] disparity_ref,
final double [] scene_xyz, // camera center in world coordinates
final double [] scene_atr, // camera orientation relative to world frame
final int debug_level) {
final int scene_extrap_irad = 1;
final double scene_extrap_rad = scene_extrap_irad + 0.5;
final int dbg_tileX=70;
final int dbg_tileY=19;
final int tilesX = scene.getTileProcessor().getTilesX();
final int tilesY = scene.getTileProcessor().getTilesY();
final int tileSize = scene.getTileProcessor().getTileSize();
double max_rad2 = scene_extrap_rad * scene_extrap_rad * tileSize * tileSize; // in pixels
final Matrix [][] scene_approx = new Matrix[disparity_ref.length][];
final double [] disparity_scene = new double [tilesX*tilesY];
Arrays.fill(disparity_scene, Double.NaN);
final int dbg_tile=dbg_tileY * tilesX + dbg_tileX;
double [][] scene_pXpYD; // disparity in the reference view tiles (Double.NaN - invalid)
///****************
double [][] scene_pXpYD_prefilter = transformToScenePxPyD( // will be null for disparity == NaN, total size - tilesX*tilesY
null, // final Rectangle [] extra_woi, // show larger than sensor WOI (or null)
disparity_ref, // final double [] disparity_ref, // invalid tiles - NaN in disparity (maybe it should not be masked by margins?)
scene_xyz, // final double [] scene_xyz, // camera center in world coordinates
scene_atr, // final double [] scene_atr, // camera orientation relative to world frame
scene, // final QuadCLT scene_QuadClt,
null); // ref_scene); // final QuadCLT reference_QuadClt)
double max_overlap = 0.6;
double min_adisp_cam = 0.2;
double min_rdisp_cam = 0.03;
double [][] scene_ds =conditionInitialDS(
clt_parameters, // CLTParameters clt_parameters,
scene, // QuadCLT scene,
-1); // int debug_level);
if (scene_ds != null) {
double [] disparity_cam = scene_ds[0]; // null; // for now
scene_pXpYD = filterBG (
scene.getTileProcessor(), // final TileProcessor tp,
scene_pXpYD_prefilter, // final double [][] pXpYD,
max_overlap, // final double max_overlap,
null, // disparity_cam, // final double [] disparity_cam,
min_adisp_cam, // final double min_adisp_cam,
min_rdisp_cam, // final double min_rdisp_cam,
clt_parameters.tileX, // final int dbg_tileX,
clt_parameters.tileY, // final int dbg_tileY,
0); // 1); //debug_level); // final int debug_level);
} else {
scene_pXpYD = scene_pXpYD_prefilter;
}
// accumulate, single-threaded
//tileSize
for (int nTile = 0; nTile < scene_pXpYD.length; nTile++) if ((scene_pXpYD[nTile] != null) && !Double.isNaN(scene_pXpYD[nTile][2])) {
//final int scene_extrap_irad = 1;
// double max_rad2 = scene_extrap_irad * scene_extrap_irad;
// int tileX0=nTile % tilesX;
// int tileY0=nTile / tilesX;
int tileX0=(int) Math.floor(scene_pXpYD[nTile][0]/tileSize);
int tileY0=(int) Math.floor(scene_pXpYD[nTile][1]/tileSize);
if ((debug_level > -1) && (Math.abs(tileY0-dbg_tileY) < 2) && (Math.abs(tileX0-dbg_tileX) < 2)) {
System.out.println("nTile="+nTile+", tilX0="+tileX0+", tileY0="+tileY0);
}
for (int dTy = -scene_extrap_irad; dTy <= scene_extrap_irad; dTy++) {
int tileY = tileY0 + dTy;
double dy = tileY * tileSize + tileSize/2 - scene_pXpYD[nTile][1];
if ((tileY >=0) && (tileY < tilesY)) {
for (int dTx = -scene_extrap_irad; dTx <= scene_extrap_irad; dTx++) {
int tileX = tileX0 + dTx;
if ((tileX >=0) && (tileX < tilesX)) {
if ((debug_level > -1) && (tileY == dbg_tileY) && (tileX == dbg_tileX)) {
System.out.println("tileX="+tileX+", tileY="+tileY);
}
double dx = tileX * tileSize + tileSize/2 - scene_pXpYD[nTile][0];
double rad2 = dy*dy+dx*dx;
if (rad2 < max_rad2) {
int tile = tileY * tilesX + tileX;
double w = 1 - (rad2/max_rad2);
if (scene_approx[tile] == null) {
scene_approx[tile] = new Matrix[2];
scene_approx[tile][0] = new Matrix(3,3); // A
scene_approx[tile][1] = new Matrix(3,1); // B
}
double d = scene_pXpYD[nTile][2];
double dsx = w *dx;
double dsy = w *dy;
double dsx2 = dsx*dx;
double dsy2 = dsy*dy;
double dsxy = dsx*dy;
double ds0 = w;
double dsxd = dsx * d;
double dsyd = dsy * d;
double dsd = ds0 * d;
double [][] A = scene_approx[tile][0].getArray();
A[0][0] += dsx2;
A[0][1] += dsxy;
A[0][2] += dsx;
A[1][1] += dsy2;
A[1][2] += dsy;
A[2][2] += ds0;
double [][] B = scene_approx[tile][1].getArray();
B[0][0] += dsxd;
B[1][0] += dsyd;
B[2][0] += dsd;
/* ax + by + c ~= d
a * sx2 + b * sxy + c * sx - sxd = 0
a * sxy + b * sy2 + c * sy - syd = 0
a * sx + b * sy + c * s0 - sd = 0
| sx2 sxy sx | | a | | sxd |
| sxy sy2 sy | * | b | = | syd |
| sx sy s0 | | c | | sd | */
}
}
}
}
}
} // for (int nTile = 0;...
for (int nTile = 0; nTile < scene_pXpYD.length; nTile++) if (scene_approx[nTile] != null) {
if (debug_level > -1) {
int tileY = nTile / tilesX;
int tileX = nTile % tilesX;
if ((tileY == dbg_tileY) && (tileX == dbg_tileX)) {
System.out.println("tileX="+tileX+", tileY="+tileY);
}
}
double [][] A = scene_approx[nTile][0].getArray();
if (A[2][2] > 0) {
A[1][0] = A[0][1];
A[2][0] = A[0][2];
A[2][1] = A[1][2];
try {
Matrix abc = scene_approx[nTile][0].solve(scene_approx[nTile][1]);
disparity_scene[nTile] = abc.get(2, 0) + abc.get(0, 0)*tileSize/2 + abc.get(1, 0)*tileSize/2;
} catch (RuntimeException e){
// Use just average of disparities
disparity_scene[nTile] = scene_approx[nTile][1].get(2,0) / A[2][2]; // Double.NaN;
}
}
} // for (int nTile = 0;...
return disparity_scene;
}
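// Summary of interpolateSingleSceneDisparity() above: each projected sample (pX, pY, disparity) contributes
// to the neighboring tiles (within +/-scene_extrap_irad) whose centers lie closer than scene_extrap_rad tile
// sizes, with weight w = 1 - r^2/max_rad2 (r = distance from the sample to the tile center, in pixels).
// Per tile the weighted normal equations A*[a b c]^T = B of the plane fit d ~= a*dx + b*dy + c are accumulated
// (dx, dy = tile center - sample position) and solved; if the system is singular, the catch branch falls back
// to the weighted mean disparity B[2][0]/A[2][2] = sum(w*d)/sum(w). The solved plane is evaluated at
// dx = dy = tileSize/2 to produce disparity_scene[nTile].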
// Cleaned up and optimized version to reduce memory usage (on-the-fly integration, not saving full correlation data)
public static double[][] correlateInterscene(
final CLTParameters clt_parameters,
...
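The "on-the-fly integration" mentioned in the comment above can be illustrated generically: instead of keeping every per-scene correlation tile and summing at the end, a running per-tile sum is updated as each result is produced, so only the accumulator stays in memory. A minimal sketch of that pattern (hypothetical names, generic illustration only - not the actual correlateInterscene() implementation):
// Generic on-the-fly accumulation sketch (hypothetical helper, not Elphel code)
static double [][] accumulateOnTheFly(
int numTiles,
int corrLen, // samples per correlation tile
java.util.Iterator<double[][]> perSceneCorr) { // one [numTiles][corrLen] array per scene, tiles may be null
double [][] acc = new double [numTiles][];
while (perSceneCorr.hasNext()) {
double [][] corr = perSceneCorr.next(); // correlation data for one scene
for (int nTile = 0; nTile < numTiles; nTile++) {
if (corr[nTile] == null) continue;
if (acc[nTile] == null) acc[nTile] = new double [corrLen];
for (int i = 0; i < corrLen; i++) {
acc[nTile][i] += corr[nTile][i]; // running sum; per-scene data can be discarded immediately
}
}
}
return acc;
}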
@@ -2386,7 +2386,7 @@ public class QuadCLT extends QuadCLTCPU {
0x3f, // int corr_mask, // which correlation pairs to generate (maybe later - reduce size from 15x15)
debugLevel); // final int debugLevel) - not yet used
if (tp_tasks.length == 0) {
- System.out.println("Empty tasks - nothing to do");
+ System.out.println("Empty tasks - nothing to do 2");
return null;
}
@@ -2958,7 +2958,7 @@ public class QuadCLT extends QuadCLTCPU {
0x3f, // int corr_mask, // which correlation pairs to generate (maybe later - reduce size from 15x15)
debugLevel); // final int debugLevel) - not yet used
if (tp_tasks.length == 0) {
- System.out.println("--- Empty tasks - nothing to do ---");
+ System.out.println("--- Empty tasks - nothing to do 3---");
return null;
}
...