gd.addCheckbox("Use window for AUX tiles to reduce weight of the hi-res tiles near low-res tile boundaries",this.ly_gt_use_wnd);
gd.addNumericField("Aux disparity thershold to split FG and BG (and disable AUX tile for adjustment)",this.ly_gt_rms,3);
gd.addMessage("--- others ---");
gd.addNumericField("Relative weight margins (0.0 - all 1.0, 1.0 sin^2",this.ly_marg_fract,8,3,"",
"Reduce weigt of peripheral tiles");
gd.addCheckbox("Calculate and apply lazy eye correction after disparity scan (poly or extrinsic), may repeat",this.ly_on_scan);
gd.addCheckbox("Adjust disparity using objects at infinity by changing individual tilt and azimuth ",this.ly_inf_en," disable if there are no really far objects in the scene");
gd.addNumericField("Minimal number of clusters with forced disparity to use it (otherwise keep current)",this.ly_min_forced,0);
gd.addCheckbox("Adjust azimuths and tilts",this.ly_aztilt_en,"Adjust azimuths and tilts excluding those that change disparity");
gd.addCheckbox("Adjust differential rolls",this.ly_diff_roll_en,"Adjust differential rolls (3 of 4 rolls, keeping average roll)");
// corr_wndy, // double [] window_y, // (half) window function in y-direction (perpendicular to disparity; for row 0 == 1)
// corr_wndx, // double [] window_x, // half of a window function in x (disparity) direction
    (tile_lma_debug_level > 0)); // boolean debug
}
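// Illustrative sketch (assumption, not from this file): a plausible construction of the
// (half) window functions referenced above is a raised-cosine taper sampled over the
// correlation span, with the row-0 sample forced to 1; the helper below is hypothetical:
// static double[] halfWindow(int n) {
//     double[] w = new double[n];
//     for (int i = 0; i < n; i++) {
//         w[i] = 0.5 * (1.0 + Math.cos(Math.PI * i / n)); // w[0] == 1, falls toward 0 at i == n-1
//     }
//     return w;
// }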
// proceed only if CM correlation result is non-null // for compatibility with old code we need it to run regardless of the strength of the normal correlation
if (disparity_map != null) {
    if (corr_stat[cTile] != null) {
        disparity_map[DISPARITY_INDEX_CM][tIndex] = -corr_stat[cTile][0]; // disp_str[cTile][0]; // disparity is negative X
        disparity_map[DISPARITY_INDEX_INT + 1][tIndex] = -corr_stat[cTile][0] / 0.85 + disparity_array[tileY][tileX] + disparity_corr; // disp_str[cTile][0]; // disparity is negative X
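        // Descriptive note: the correlation maximum is measured as an X offset, so disparity
        // enters with a negative sign. The 0.85 divisor appears to match the empirical
        // corr_magic_scale passed below (the reported correlation offset is smaller than the
        // true disparity). Illustrative arithmetic with hypothetical values:
        // corr_stat[cTile][0] = -0.425 -> -(-0.425) / 0.85 = 0.5 px of residual disparity,
        // added to the requested disparity_array[tileY][tileX] and the global disparity_corr.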
            corr_wnd,                     // double [][] corr_wnd, // correlation window to save on re-calculation of the window
            corr_wnd_inv_limited,         // corr_wnd_limited, // correlation window, limited not to be smaller than threshold - used for finding max/convex areas (or null)
            corrs[cTile],                 // double [][] corrs,
            disp_dist[cTile],
            rXY,                          // double [][] rXY, // non-distorted X,Y offset per nominal pixel of disparity
            imgdtt_params.dbg_pair_mask,  // int pair_mask, // which pairs to process
            null,                         // disp_str[cTile], // corr_stat[0], // double xcenter, // preliminary center x in pixels for largest baseline
            poly_disp,                    // double [] poly_ds, // null or pair of disparity/strength
            imgdtt_params.ortho_vasw_pwr, // double vasw_pwr, // value as weight to this power
            tdl,                          // tile_lma_debug_level, //+2, // int debug_level
            tileX,                        // int tileX, // just for debug output
            tileY);                       // int tileY
        disp_str[cTile] = null;
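        // Illustrative note (assumption): vasw_pwr ("value as weight to this power") above
        // suggests each correlation sample is weighted by its own value raised to that power,
        // e.g. w = Math.pow(Math.max(v, 0.0), vasw_pwr); larger powers emphasize the
        // correlation peak over the low-value background in the fit.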
        // debug new LMA correlations
        int tdl = debugCluster ? tile_lma_debug_level : -3;
        if (true) { // debugCluster1) {
            if (debugCluster && (globalDebugLevel > -1)) { // -2)) {
                System.out.println("Will run new LMA for tileX=" + tileX + ", tileY=" + tileY);
                corr_wnd,                     // double [][] corr_wnd, // correlation window to save on re-calculation of the window
                corr_wnd_inv_limited,         // corr_wnd_limited, // correlation window, limited not to be smaller than threshold - used for finding max/convex areas (or null)
                corrs[cTile],                 // double [][] corrs,
                disp_dist[cTile],
                rXY,                          // double [][] rXY, // non-distorted X,Y offset per nominal pixel of disparity
                imgdtt_params.dbg_pair_mask,  // int pair_mask, // which pairs to process
                null,                         // disp_str[cTile], // corr_stat[0], // double xcenter, // preliminary center x in pixels for largest baseline
                poly_disp,                    // double [] poly_ds, // null or pair of disparity/strength
                imgdtt_params.ortho_vasw_pwr, // double vasw_pwr, // value as weight to this power
                tdl,                          // tile_lma_debug_level, //+2, // int debug_level
                clt_parameters.ly_aztilt_en,    // boolean use_aztilts, // Adjust azimuths and tilts excluding disparity
                clt_parameters.ly_diff_roll_en, // boolean use_diff_rolls, // Adjust differential rolls (3 of 4 angles)
                clt_parameters.ly_inf_force,    // boolean force_convergence, // if true try to adjust convergence (disparity, symmetrical parameter 0) even with no disparity
//              clt_parameters.ly_inf_force,  // boolean force_convergence, // if true try to adjust convergence (disparity, symmetrical parameter 0) even with no disparity
                clt_parameters.ly_min_forced, // int min_num_forced, // minimal number of clusters with forced disparity to use it
                                              // data, using just radial distortions
                clt_parameters.ly_com_roll,   // boolean common_roll, // Enable common roll (valid for high disparity range only)
                target_disparity,                // double [][] target_disparity, // null or programmed disparity (1 per each 14 entries of scans_14)
                tp.getTilesX(),                  // int tilesX,
                clt_parameters.corr_magic_scale, // double magic_coeff, // still not understood coefficient that reduces reported disparity value. Seems to be around 0.85
                debugLevelInner - 1);            // + (clt_parameters.fine_dbg ? 1:0)); // int debugLevel
            if (new_corr == null) {
                return false;
            }
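            // Descriptive note: the block below reduces the per-camera polynomial corrections
            // in new_corr to a single scalar comp_diff = sqrt(sum(c^2) / N) - the RMS over all
            // correction coefficients. Illustrative arithmetic with hypothetical values:
            // coefficients {0.02, -0.01, 0.02, -0.01} -> sum of squares = 0.001, N = 4,
            // comp_diff = sqrt(0.00025) ~= 0.0158; iteration exits once this drops below
            // min_poly_update.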
            comp_diff = 0.0;
            int num_pars = 0;
            if (adjust_poly) {
                apply_fine_corr(
                    new_corr,
                    debugLevelInner + 2);
                for (int n = 0; n < new_corr.length; n++) {
                    for (int d = 0; d < new_corr[n].length; d++) {
                        for (int i = 0; i < new_corr[n][d].length; i++) {
                            comp_diff += new_corr[n][d][i] * new_corr[n][d][i];
                            num_pars++;
                        }
                    }
                }
                comp_diff = Math.sqrt(comp_diff / num_pars); // RMS of the correction coefficients (num_pars > 0 only in this branch)
            } else {
                if (apply_extrinsic) {
                    geometryCorrection.setCorrVector(corr_vector);
                    System.out.println("Extrinsic correction updated (can be disabled by setting clt_parameters.ly_corr_scale = 0.0)");
                } else {
                    System.out.println("Correction is not applied according to clt_parameters.ly_corr_scale == 0.0");
                }
            }
            if (debugLevel > -2) {
                if ((debugLevel > -1) || (comp_diff < min_poly_update)) {
                    System.out.println("#### fine correction iteration step = " + (num_iter + 1) + " (of " + max_tries + ") change = " +
                        comp_diff + " (" + min_poly_update + ")");
                } else {
                    if (debugLevel > -3) {
                        System.out.println("LMA failed");
                    }
                }
            }
            if (comp_diff < min_poly_update) { // add other parameter to exit from poly