Commit c3f2181a authored by Andrey Filippov's avatar Andrey Filippov

Before row/col

parent 8550fc6e
......@@ -1279,7 +1279,7 @@ public class GpuQuad{ // quad camera description
if (!force && this.gpuTileProcessor.bayer_set && !quadCLT.hasNewImageData()) {
return;
}
double [][][] bayer_data = quadCLT.getImageData(); // resets hasNewImageData()
double [][][] bayer_data = quadCLT.getResetImageData(); // resets hasNewImageData()
setBayerImages(
bayer_data,
true);
......
......@@ -664,10 +664,14 @@ min_str_neib_fpn 0.35
public double cuas_decay_average = 100.0; // Decay in seconds for cumulative CLT
public double cuas_keep_fraction = 0.9; // Filter CLT variants for tiles keeping at least this fraction of the total weight
public boolean cuas_step = true; // recalculate template image after each fitting step
public boolean cuas_subtract_fpn= true; // Subtract FPN
public boolean cuas_calc_fpn = false; // Recalculate+save FPN unconditionally, if false - try to read saved one
public double cuas_rot_period = 175.0; // rotation period of the gimbal mount (in scenes)
public boolean cuas_debug = false; // save debug images (and show them if not in batch mode)
public boolean cuas_step_debug = false; // save debug images during per-step cuas recalculation (and show them if not in batch mode)
// TODO: move next parameters elsewhere - they are not the motion blur ones.
public int mb_gain_index_pose = 5; // pose readjust pass to switch to full mb_max_gain from mb_max_gain_inter
public int mb_gain_index_depth = 5; // depth map refine pass (SfM) to switch to full mb_max_gain from mb_max_gain_inter
......@@ -731,6 +735,7 @@ min_str_neib_fpn 0.35
public String video_codec_combo = "vp8"; // applies when combining videos
public int video_crf_combo = 40; // lower - better, larger file size applies when combining videos
public boolean add_average = true;
public boolean calculate_average = false; // Calculate average from the slices. False - use CLT average if available.
public boolean subtract_average = false;
public int running_average = 0;
public boolean extract_center_orientation = true; // in lock_position mode only - debug feature
......@@ -1995,10 +2000,15 @@ min_str_neib_fpn 0.35
"Filter tile CLT variants that keep fraction of the total weight.");
gd.addCheckbox ("Recalculate after each tuning step", this.cuas_step,
"Recalciulate correlation pattern after each position/orientation and disparity adjustment step.");
gd.addCheckbox ("Subtract FPN", this.cuas_subtract_fpn,
"Subtract FPN from the channel images.");
gd.addCheckbox ("(Re)calculate FPN", this.cuas_calc_fpn,
"Recalculate+save FPN unconditionally, if false - try to read saved one.");
gd.addNumericField("Rotation period", this.cuas_rot_period, 5,7,"scene periods",
"Used for averaging FPN for integer number of rotation periods. TODO: calculate from the actual rotation.");
gd.addMessage("=== Debug ===");
gd.addCheckbox ("Save/show debug images", this.cuas_debug,
"Save CUAS-related debug images and show them in non-batch mode.");
gd.addMessage("=== Debug ===");
gd.addCheckbox ("Save/show debug images for each tuning step",this.cuas_step_debug,
"Save CUAS-related debug images during per-step cuas recalculation and show them in non-batch mode.");
......@@ -2127,8 +2137,10 @@ min_str_neib_fpn 0.35
"FFMPEG video encoder, such as \"VP8\" or \"VP9\". Applies when merging segments.");
gd.addNumericField("Video CRF for combining", this.video_crf_combo, 0,3,"",
"Quality - the lower the better. 40 - OK. Applies when merging segments.");
gd.addCheckbox ("Average slice", this.add_average,
gd.addCheckbox ("Insert average slice", this.add_average,
"Insert average slice before scene slices");
gd.addCheckbox ("Calculate average from slices", this.calculate_average,
"Calculate average slice from other slices. If false - try to use average CLT if available.");
gd.addCheckbox ("Subtract average", this.subtract_average,
"Subtract average slice from all scenes");
gd.addNumericField("Running average length", this.running_average, 0,3,"",
......@@ -2896,6 +2908,10 @@ min_str_neib_fpn 0.35
this.cuas_decay_average = gd.getNextNumber();
this.cuas_keep_fraction = gd.getNextNumber();
this.cuas_step = gd.getNextBoolean();
this.cuas_subtract_fpn = gd.getNextBoolean();
this.cuas_calc_fpn = gd.getNextBoolean();
this.cuas_rot_period = gd.getNextNumber();
this.cuas_debug = gd.getNextBoolean();
this.cuas_step_debug = gd.getNextBoolean();
......@@ -2961,6 +2977,7 @@ min_str_neib_fpn 0.35
this.video_codec_combo= gd.getNextString();
this.video_crf_combo = (int) gd.getNextNumber();
this.add_average = gd.getNextBoolean();
this.calculate_average = gd.getNextBoolean();
this.subtract_average = gd.getNextBoolean();
this.running_average = (int) gd.getNextNumber();
this.extract_center_orientation=gd.getNextBoolean();
......@@ -3723,6 +3740,11 @@ min_str_neib_fpn 0.35
properties.setProperty(prefix+"cuas_decay_average", this.cuas_decay_average+""); // double
properties.setProperty(prefix+"cuas_keep_fraction", this.cuas_keep_fraction+""); // double
properties.setProperty(prefix+"cuas_step", this.cuas_step+""); // boolean
properties.setProperty(prefix+"cuas_subtract_fpn", this.cuas_subtract_fpn+""); // boolean
properties.setProperty(prefix+"cuas_calc_fpn", this.cuas_calc_fpn+""); // boolean
properties.setProperty(prefix+"cuas_rot_period", this.cuas_rot_period+""); // double
properties.setProperty(prefix+"cuas_debug", this.cuas_debug+""); // boolean
properties.setProperty(prefix+"cuas_step_debug", this.cuas_step_debug+""); // boolean
......@@ -3780,6 +3802,7 @@ min_str_neib_fpn 0.35
properties.setProperty(prefix+"video_codec_combo", this.video_codec_combo+""); // String
properties.setProperty(prefix+"video_crf_combo", this.video_crf_combo+""); // int
properties.setProperty(prefix+"add_average", this.add_average+""); // boolean
properties.setProperty(prefix+"calculate_average", this.calculate_average+""); // boolean
properties.setProperty(prefix+"subtract_average", this.subtract_average+""); // boolean
properties.setProperty(prefix+"running_average", this.running_average+""); // int
......@@ -4525,6 +4548,10 @@ min_str_neib_fpn 0.35
if (properties.getProperty(prefix+"cuas_decay_average")!=null) this.cuas_decay_average=Double.parseDouble(properties.getProperty(prefix+"cuas_decay_average"));
if (properties.getProperty(prefix+"cuas_keep_fraction")!=null) this.cuas_keep_fraction=Double.parseDouble(properties.getProperty(prefix+"cuas_keep_fraction"));
if (properties.getProperty(prefix+"cuas_step")!=null) this.cuas_step=Boolean.parseBoolean(properties.getProperty(prefix+"cuas_step"));
if (properties.getProperty(prefix+"cuas_subtract_fpn")!=null) this.cuas_subtract_fpn=Boolean.parseBoolean(properties.getProperty(prefix+"cuas_subtract_fpn"));
if (properties.getProperty(prefix+"cuas_calc_fpn")!=null) this.cuas_calc_fpn=Boolean.parseBoolean(properties.getProperty(prefix+"cuas_calc_fpn"));
if (properties.getProperty(prefix+"cuas_rot_period")!=null) this.cuas_rot_period=Double.parseDouble(properties.getProperty(prefix+"cuas_rot_period"));
if (properties.getProperty(prefix+"cuas_debug")!=null) this.cuas_debug=Boolean.parseBoolean(properties.getProperty(prefix+"cuas_debug"));
if (properties.getProperty(prefix+"cuas_step_debug")!=null) this.cuas_step_debug=Boolean.parseBoolean(properties.getProperty(prefix+"cuas_step_debug"));
......@@ -4583,6 +4610,8 @@ min_str_neib_fpn 0.35
if (properties.getProperty(prefix+"video_codec_combo")!=null) this.video_codec_combo=(String) properties.getProperty(prefix+"video_codec_combo");
if (properties.getProperty(prefix+"video_crf_combo")!=null) this.video_crf_combo=Integer.parseInt(properties.getProperty(prefix+"video_crf_combo"));
if (properties.getProperty(prefix+"add_average")!=null) this.add_average=Boolean.parseBoolean(properties.getProperty(prefix+"add_average"));
if (properties.getProperty(prefix+"calculate_average")!=null) this.calculate_average=Boolean.parseBoolean(properties.getProperty(prefix+"calculate_average"));
//
if (properties.getProperty(prefix+"subtract_average")!=null) this.subtract_average=Boolean.parseBoolean(properties.getProperty(prefix+"subtract_average"));
if (properties.getProperty(prefix+"running_average")!=null) this.running_average=Integer.parseInt(properties.getProperty(prefix+"running_average"));
if (properties.getProperty(prefix+"extract_center_orientation")!=null)this.extract_center_orientation=Boolean.parseBoolean(properties.getProperty(prefix+"extract_center_orientation"));
......@@ -5327,6 +5356,10 @@ min_str_neib_fpn 0.35
imp.cuas_decay_average = this.cuas_decay_average;
imp.cuas_keep_fraction = this.cuas_keep_fraction;
imp.cuas_step = this.cuas_step;
imp.cuas_subtract_fpn = this.cuas_subtract_fpn;
imp.cuas_calc_fpn = this.cuas_calc_fpn;
imp.cuas_rot_period = this.cuas_rot_period;
imp.cuas_debug = this.cuas_debug;
imp.cuas_step_debug = this.cuas_step_debug;
......@@ -5386,6 +5419,7 @@ min_str_neib_fpn 0.35
imp.video_codec_combo = this.video_codec_combo;
imp.video_crf_combo = this.video_crf_combo;
imp.add_average = this.add_average;
imp.calculate_average = this.calculate_average;
imp.subtract_average = this.subtract_average;
imp.running_average = this.running_average;
imp.extract_center_orientation=this.extract_center_orientation;
......
......@@ -958,7 +958,7 @@ public class MultisceneLY {
image_dtt.getCorrelation2d();
double [][][][] dcorr_td = new double[tp_tasks_scenes[nscene].length][][][];
image_dtt.quadCorrTD( // clt_data [task][sensor][color][][];
scene.getImageData(), // final double [][][] image_data, // first index - number of image in a quad
scene.getResetImageData(), // final double [][][] image_data, // first index - number of image in a quad
scene_gc.getSensorWH()[0], // final int width,
tp_tasks_scenes[nscene], // tp_tasks, // final TpTask [] tp_tasks,
clt_parameters.img_dtt, // final ImageDttParameters imgdtt_params, // Now just extra correlation parameters, later will include, most others
......
......@@ -3373,7 +3373,7 @@ public class OpticalFlow {
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nTile = ai.getAndIncrement(); nTile < tiles; nTile = ai.getAndIncrement()) if (dsrbg_camera[QuadCLT.DSRBG_STRENGTH][nTile] > 0.0) {
for (int nTile = ai.getAndIncrement(); nTile < tiles; nTile = ai.getAndIncrement()) if ((dsrbg_camera[QuadCLT.DSRBG_STRENGTH] == null) || (dsrbg_camera[QuadCLT.DSRBG_STRENGTH][nTile] > 0.0)) { // null pointer
double disparity = dsrbg_camera[QuadCLT.DSRBG_DISPARITY][nTile];
if (!Double.isNaN(disparity)) {
int tileY = nTile / tilesX;
......@@ -3409,7 +3409,7 @@ public class OpticalFlow {
int sTile = spx + spy* stilesX;
ds[QuadCLT.DSRBG_DISPARITY][sTile] = d; // pXpYD[2]; //reduce*
for (int i = QuadCLT.DSRBG_STRENGTH; i < dsrbg_camera.length; i++) {
ds[i][sTile] = dsrbg_camera[i][nTile]; // reduce *
ds[i][sTile] = (dsrbg_camera[i] != null) ?dsrbg_camera[i][nTile]:0.001; // reduce *
}
}
}
......@@ -4841,6 +4841,14 @@ public class OpticalFlow {
double sfm_fracall = clt_parameters.imp.sfm_fracall; // 0.3; // minimal relative area of the SfM-enabled tiles (do not apply filter if less)
double min_ref_frac= clt_parameters.imp.min_ref_frac;
boolean cuas_subtract_fpn = clt_parameters.imp.cuas_subtract_fpn;
boolean cuas_calc_fpn = clt_parameters.imp.cuas_calc_fpn;
double cuas_rot_period = clt_parameters.imp.cuas_rot_period;
boolean cuas_debug = clt_parameters.imp.cuas_debug; // save debug images (and show them if not in batch mode)
boolean cuas_step_debug = clt_parameters.imp.cuas_step_debug;
double [] ref_blue_sky = null; // turn off "lma" in the ML output
if (reuse_video) { // disable all other options
generate_mapped = false;
......@@ -4945,7 +4953,6 @@ public class OpticalFlow {
if (cuas_centers != null) {
parent_clt_name = cuas_centers[0];
}
boolean cuas_debug = clt_parameters.imp.cuas_debug; // save debug images (and show them if not in batch mode)
double [] dbg_weights = cuas_debug ? new double [quadCLTs[last_index].getTilesX()*quadCLTs[last_index].getTilesY()] : null;
center_CLT = QuadCLT.restoreCenterClt(
clt_parameters, // CLTParameters clt_parameters,
......@@ -5144,7 +5151,6 @@ public class OpticalFlow {
updateStatus, // final boolean updateStatus,
debugLevel); // int debugLevel)
if (center_CLT != null) {
System.out.println ("In CUAS mode dsi[TwoQuadCLT.DSI_SPREAD_AUX] == null), breaking loop");
System.out.println ("In CUAS mode dsi[TwoQuadCLT.DSI_SPREAD_AUX] == null), breaking loop");
break;
}
......@@ -5154,6 +5160,80 @@ public class OpticalFlow {
}
}
} // while (blue_sky == null)
if ((center_CLT != null) && center_CLT.hasCenterClt()) {
int fpn_width = center_CLT.getTilesX() * center_CLT.getTileSize(); // see if center_CLT can be used
if (cuas_subtract_fpn) {
boolean show_fpn = cuas_debug && !clt_parameters.batch_run; //
boolean changed = quadCLT_main.isPhotometricUpdatedAndReset();
if (changed) {
System.out.println ("00.re-spawning with updated photogrammetric calibration of reference scene.");
} else {
System.out.println ("00.no update, spawning only over null");
}
for (int scene_index = last_index; scene_index >= earliest_scene ; scene_index--) {
// should we skip if already exists? Or need to re-run to apply new photometric calibration?
// Or should photogrammetric calibration be saved with center_CLT?
// to include ref scene photometric calibration
if (changed || (quadCLTs[scene_index] == null)) {
quadCLTs[scene_index] = quadCLTs[last_index].spawnNoModelQuadCLT( // restores image data
set_channels[scene_index].set_name,
clt_parameters,
colorProcParameters, //
threadsMax,
debugLevel-2);
}
}
double [][][] fpn = center_CLT.readImageFPN ( -1); // int sens_mask);
if ((fpn == null) || cuas_calc_fpn) {
if (debugLevel >-3) {
System.out.println("Calculating FPN.");
}
int num_scenes = quadCLTs.length;
int rot_periods = (int) Math.floor(num_scenes/cuas_rot_period);
int rot_scenes = (int) Math.floor(rot_periods *cuas_rot_period);
int [] rot_range = {0, rot_scenes-1};
fpn = QuadCLT.calculateFPN(
quadCLTs, // final QuadCLT [] quadCLTs,
rot_range, // final int [] range, // required
-1, // final int sensor_mask,
debugLevel); // final int debugLevel)
int dbg_sens = 12;
if (cuas_debug && (dbg_sens >= 0)) {
center_CLT.debugFPN(
quadCLTs, // QuadCLT [] quadCLTs,
fpn, // double [][][] fpn,
rot_range, // int [] range,
dbg_sens, // int nsens,
show_fpn); // boolean show) {
}
} else {
if (debugLevel >-3) {
System.out.println("Reusing FPN.");
}
}
// center_CLT.setImageData(fpn); // included in center_CLT.setApplyFPN(). // setting FPN images to the virtual (center) scene
center_CLT.saveShowFPN(
fpn,// double [][][] fpn,
fpn_width, // int width,
true, // boolean save,
show_fpn); // boolean show) {
center_CLT.setApplyFPN(
quadCLTs, // QuadCLT [] quadCLTs,
fpn);// double [][][] fpn)
} else {
if (debugLevel >-3) {
System.out.println("Skipping FPN.");
}
// center_CLT.setImageData(null);
center_CLT.setApplyFPN(
quadCLTs, // QuadCLT [] quadCLTs,
null);// double [][][] fpn)
}
}
// by now if it was center_reference the last ref_dsi is restored with optional blue sky.
if (center_CLT == null) { // does not work in CUAS mode
......@@ -5225,46 +5305,11 @@ public class OpticalFlow {
int ref_index = last_index; // old versions
double [][] xyzatr_ims_center = null;
double [] atr_ims_center = null;
if (force_initial_orientations && !reuse_video) {
// if (use_cuas) {
// try to restore if exists
// center_CLT = QuadCLT.restoreCenterClt(
// quadCLTs[last_index]); // ref_index]);
/*
if (center_CLT != null) { // no sense to calculate center if CLT data is not available (for now will need to manually copy)
// to this vXX version
if (debugLevel > -3) {
System.out.println("Restored center CLT in CUAS mode");
}
// quadCLTs[last_index] should be known here
// for (int scene_index = ref_index - 1; scene_index >= 0 ; scene_index--) if (quadCLTs[scene_index] == null){
for (int scene_index = last_index - 1; scene_index >= 0 ; scene_index--) if (quadCLTs[scene_index] == null){
// to include ref scene photometric calibration
quadCLTs[scene_index] = quadCLTs[last_index].spawnNoModelQuadCLT(
set_channels[scene_index].set_name,
clt_parameters,
colorProcParameters, //
threadsMax,
debugLevel-2);
} // split cycles to remove output clutter
xyzatr_ims_center = Interscene.getXyzatrImsCenter( // use [0], [1] - radii
clt_parameters, // final CLTParameters clt_parameters,
use_ims_rotation, // final boolean compensate_ims_rotation,
inertial_only, // final boolean inertial_only,
quadCLTs[last_index], // final QuadCLT refClt,
quadCLTs, //final QuadCLT[] quadCLTs, //
debugLevel); // final int debugLevel)
atr_ims_center = new double[] {xyzatr_ims_center[0][0],xyzatr_ims_center[0][1],xyzatr_ims_center[0][2]};
if (debugLevel > -3) {
System.out.println("xyzatr_ims_center[0]: A="+xyzatr_ims_center[0][0]+", T="+xyzatr_ims_center[0][1]+", R="+xyzatr_ims_center[0][2]);
}
} else {
if (debugLevel > -3) {
System.out.println("Center CLT for CUAS mode does not exist, falling back to non-CUAS mode");
}
}
*/
// }
boolean OK = false;
int es1 = -1;
......@@ -5394,6 +5439,9 @@ public class OpticalFlow {
}
}
}
// FPN here?
} else {
if (!reuse_video) { // reuse_video only uses reference scene
boolean changed = quadCLTs[ref_index].isPhotometricUpdatedAndReset();
......@@ -5722,7 +5770,6 @@ public class OpticalFlow {
// update center_CLT.center_clt
int sensor_mask_clt = -1; // all
boolean condition_dsi = true;
boolean cuas_step_debug = clt_parameters.imp.cuas_step_debug;
Cuas.stepCenterClt(
clt_parameters, // CLTParameters clt_parameters,
quadCLTs, // QuadCLT [] quadCLTs,
......@@ -5976,15 +6023,10 @@ public class OpticalFlow {
false); // true); // boolean show);
center_CLT.setCenterAverage(imp_center_clt);
// just for verification
boolean cuas_debug = clt_parameters.imp.cuas_debug; // save debug images (and show them if not in batch mode)
if (cuas_debug) { // show_clt && !clt_parameters.batch_run) {
if (!clt_parameters.batch_run) {
imp_center_clt.show();
}
// ImagePlus imp_center_clt= center_CLT.showCenterClt(
// null, // float [][] fclt, // may be null
// clt_parameters, // CLTParameters clt_parameters,
// !clt_parameters.batch_run); // true); // boolean show);
if (imp_center_clt != null) {
String suffix =dbg_created?"-CLT-CREATED":"CLT-UPDATED";
center_CLT.saveImagePlusInModelDirectory(
......@@ -6003,7 +6045,6 @@ public class OpticalFlow {
Interscene.generateEgomotionTable(
clt_parameters, // CLTParameters clt_parameters,
quadCLTs, // QuadCLT [] quadCLTs,
// ref_index,// ref_indx,
master_CLT, // quadCLTs[ref_index], //QuadCLT ref_scene, // may be one of quadCLTs or center_CLT
earliest_scene, // int earliest_scene,
ego_path, // String path,
......@@ -6018,7 +6059,6 @@ public class OpticalFlow {
Interscene.generateEgomotionTable(
clt_parameters, // CLTParameters clt_parameters,
quadCLTs, // QuadCLT [] quadCLTs,
// ref_index,// ref_indx,
master_CLT, // quadCLTs[ref_index], //QuadCLT ref_scene, // may be one of quadCLTs or center_CLT
earliest_scene, // int earliest_scene,
ego_path, // String path,
......@@ -6091,25 +6131,6 @@ public class OpticalFlow {
// Testing vegetation, for debugging supposing that terrain layer is already set in *-INTER-INTRA-LMA.tiff - normally it is only set during 3d model generation
// Moved to the very end, after 3D
boolean test_vegetation = true;
/*
if (test_vegetation) { // limit start of the quadCLTs by reading start/end from the reference scene
int [] first_last = quadCLTs[ref_index].getFirstLastIndex(quadCLTs);
QuadCLT [] quadCLT_tail = new QuadCLT [quadCLTs.length - earliest_scene];
System.arraycopy(quadCLTs, earliest_scene, quadCLT_tail, 0, quadCLT_tail.length);
VegetationModel.test_vegetation(
clt_parameters, // CLTParameters clt_parameters,
quadCLT_tail, // QuadCLT [] quadCLTs,
ref_index-earliest_scene, // int ref_index,
debugLevel); // int debugLevel)
if (videos != null) {
videos[0] = new String[0];
}
// temporarily - exiting now
return quadCLTs[ref_index].getX3dTopDirectory();
}
*/
if (generate_mapped || reuse_video) { // modifies combo_dsn_final ?
int tilesX = master_CLT.getTileProcessor().getTilesX();
......@@ -6283,7 +6304,7 @@ public class OpticalFlow {
}
if (generate_mapped) {
double [][] ds_vantage = new double[][] {selected_disparity,selected_strength};
if ((views[ibase][0] != 0) || (views[ibase][1] != 0) || (views[ibase][2] != 0) || master_CLT.hasCenterClt()) {
if ((views[ibase][0] != 0) || (views[ibase][1] != 0) || (views[ibase][2] != 0) || (master_CLT.hasCenterClt()) && (mode3d > 0)) {
ds_vantage = transformCameraVew(
null, // (debug_ds_fg_virt?"transformCameraVew":null), // final String title,
ds_vantage, // final double [][] dsrbg_camera_in,
......@@ -6293,14 +6314,16 @@ public class OpticalFlow {
master_CLT, // quadCLTs[ref_index], // final QuadCLT reference_QuadClt,
8); // iscale); // final int iscale);
}
if (master_CLT.getFPN() != null) {
scenes_suffix += "-FPN";
}
float [] average_pixels = (master_CLT.getCenterAverage() != null) ? ((float []) master_CLT.getCenterAverage().getProcessor().getPixels()):null;
float [][] average_channels = new float [][] {average_pixels}; // for future color images
imp_scenes_pair[nstereo]= renderSceneSequence(
clt_parameters, // CLTParameters clt_parameters,
master_CLT.hasCenterClt(), // boolean mode_cuas,
false, // clt_parameters.imp.um_mono, // boolean um_mono,
false, // clt_parameters.imp.add_average, // boolean insert_average, // then add new parameter, keep add average
clt_parameters.imp.calculate_average, // boolean insert_average, // then add new parameter, keep add average
average_channels, // average_slice,
clt_parameters.imp.subtract_average, // boolean subtract_average,
clt_parameters.imp.running_average, // int running_average,
......@@ -6324,6 +6347,7 @@ public class OpticalFlow {
}
}
if (!toRGB && um_mono) {
scenes_suffix = imp_scenes_pair[nstereo].getTitle();
imp_scenes_pair[nstereo]=applyUM ( // apply UM
scenes_suffix+ um_suffix , // final String title, // should include -UM...
imp_scenes_pair[nstereo], // final ImagePlus imp,
......@@ -7958,8 +7982,8 @@ public class OpticalFlow {
suffix+="-DIFFAVG"+running_average;
}
if (mode_cuas) {
suffix+="-CUAS"; // add properties too? include offsets
suffix+=String.format("%6f:%6f:%6f", stereo_atr[0],stereo_atr[1],stereo_atr[2]);
suffix+="-cUAS"; // add properties too? include offsets
// suffix+=String.format("%6f:%6f:%6f", stereo_atr[0],stereo_atr[1],stereo_atr[2]);
}
if (!mb_en) {
suffix+="-NOMB"; // no motion blur
......@@ -8279,7 +8303,7 @@ public class OpticalFlow {
}
if (mode_cuas) {
suffix+="-CUAS"; // add properties too? include offsets
suffix+=String.format("%6f:%6f:%6f", stereo_atr[0],stereo_atr[1],stereo_atr[2]);
/// suffix+=String.format("%6f:%6f:%6f", stereo_atr[0],stereo_atr[1],stereo_atr[2]);
}
if (!mb_en) {
suffix+="-NOMB"; // no motion blur
......@@ -13601,7 +13625,7 @@ public class OpticalFlow {
image_dtt.getCorrelation2d();
double [][][][] dcorr_td = new double[tp_tasks.length][][][]; // [tile][pair][4][64] sparse by pair transform domain representation of corr pairs
image_dtt.quadCorrTD(
scenes[nscene].getImageData(), // final double [][][] image_data, // first index - number of image in a quad
scenes[nscene].getResetImageData(), // final double [][][] image_data, // first index - number of image in a quad
scenes[nscene].getErsCorrection().getSensorWH()[0], // final int width,
tp_tasks, // final TpTask [] tp_tasks,
clt_parameters.img_dtt, // final ImageDttParameters imgdtt_params, // Now just extra correlation parameters, later will include, most others
......@@ -14079,7 +14103,7 @@ public class OpticalFlow {
image_dtt.getCorrelation2d();
double [][][][] dcorr_td = new double[tp_tasks.length][][][]; // [tile][pair][4][64] sparse by pair transform domain representation of corr pairs
image_dtt.quadCorrTD(
scenes[nscene].getImageData(), // final double [][][] image_data, // first index - number of image in a quad
scenes[nscene].getResetImageData(), // final double [][][] image_data, // first index - number of image in a quad
scenes[nscene].getErsCorrection().getSensorWH()[0], // final int width,
tp_tasks, // final TpTask [] tp_tasks,
clt_parameters.img_dtt, // final ImageDttParameters imgdtt_params, // Now just extra correlation parameters, later will include, most others
......@@ -14456,7 +14480,7 @@ public class OpticalFlow {
image_dtt.getCorrelation2d();
double [][][][] dcorr_td = new double[tp_tasks.length][][][]; // [tile][pair][4][64] sparse by pair transform domain representation of corr pairs
image_dtt.quadCorrTD(
ref_scene.getImageData(), // final double [][][] image_data, // first index - number of image in a quad
ref_scene.getResetImageData(), // final double [][][] image_data, // first index - number of image in a quad
ref_scene.getErsCorrection().getSensorWH()[0], // final int width,
tp_tasks, // final TpTask [] tp_tasks,
clt_parameters.img_dtt, // final ImageDttParameters imgdtt_params, // Now just extra correlation parameters, later will include, most others
......
......@@ -137,6 +137,8 @@ public class QuadCLTCPU {
public static final String CENTER_DIR_SUFFIX = "-CENTER";
public static final String CENTER_CLT_SUFFIX = "-CLT";
public static final String CENTER_FPN_SUFFIX = "-FPN";
// public GPUTileProcessor.GpuQuad gpuQuad = null;
......@@ -165,7 +167,10 @@ public class QuadCLTCPU {
public String image_name = null;
public String image_path = null;
double [] gps_lla = null;
public double [][][] image_data = null;
public double [][][] image_data = null; // [channel][color][pixel]
public double [][][] image_fpn = null; // [channel][color][pixel] Shared by images in a series, subtracted during conditioning
// and applied to image_data;
public double [][][] image_fpn_applied = null; // [channel][color][pixel] // when applying different instance, the old will be unapplied
boolean new_image_data = false;
boolean [][] saturation_imp = null; // (near) saturated pixels or null
boolean is_aux = false;
......@@ -6226,30 +6231,8 @@ public class QuadCLTCPU {
return file_path;
}
/*
// directory with model version, not the current version
public double [][] readDoubleArrayFromThisModelDirectory(
String suffix,
int num_slices, // (0 - all)
int [] wh)
{
float [][] fdata = readFloatArrayFromThisModelDirectory(
suffix, // String suffix,
num_slices, // int num_slices, // (0 - all)
wh); // int [] wh)
if (fdata == null) {
return null;
}
double [][] result = new double [fdata.length][];
for (int n = 0; n < fdata.length; n++) if (fdata[n]!=null) {
result[n] = new double [fdata[n].length];
for (int i = 0; i < fdata[n].length; i++) {
result[n][i] = fdata[n][i];
}
}
return result;
}
*/
public double [][] readDoubleArrayFromModelDirectory(
String suffix,
......@@ -6475,10 +6458,20 @@ public class QuadCLTCPU {
/**
 * @return true if image_data was (re)set and not yet consumed via getResetImageData().
 */
public boolean hasNewImageData() {
return new_image_data;
}
public double [][][] getImageData(){
/**
 * Get image data and clear the "new data" flag (consuming accessor).
 * @return image_data as [channel][color][pixel]
 */
public double [][][] getResetImageData(){
new_image_data = false;
return image_data;
}
/**
 * Get image data without clearing the "new data" flag.
 * @return image_data as [channel][color][pixel]
 */
public double [][][] getImageData(){ // does not reset new data
return image_data;
}
/**
 * Replace image data, mark it as new, and invalidate any FPN state —
 * the new data has no FPN subtracted and no FPN associated with it.
 * @param data new image data [channel][color][pixel] (may be null)
 */
public void setImageData(double [][][] data) {
image_data = data;
new_image_data = true;
image_fpn_applied = null;
image_fpn = null;
}
// magic scale should be set before using TileProcessor (calculated disparities depend on it)
......@@ -9852,8 +9845,9 @@ public class QuadCLTCPU {
image_name = (String) imp_srcs[0].getProperty("name");
image_path= (String) imp_srcs[0].getProperty("path");
this.saturation_imp = saturation_imp;
image_data = new double [imp_srcs.length][][];
this.new_image_data = true;
// image_data = new double [imp_srcs.length][][];
// this.new_image_data = true;
setImageData(new double [imp_srcs.length][][]);
ai.set(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
......@@ -9877,6 +9871,7 @@ public class QuadCLTCPU {
};
}
ImageDtt.startAndJoin(threads);
applyFPN(); // if non-null, fpn should be set for each scene with setFPN (double [][][] fpn)
setTiles (imp_srcs[0], // set global tp.tilesX, tp.tilesY
getNumSensors(), // tp.getNumSensors(),
clt_parameters,
......@@ -9886,6 +9881,271 @@ public class QuadCLTCPU {
return imp_srcs;
}
/**
 * @return the FPN instance currently subtracted from image_data (set by applyFPN()), or null.
 */
public double [][][] getAppliedFPN(){
return image_fpn_applied;
}
/**
 * @return the FPN associated with this scene (not necessarily applied yet), or null.
 */
public double [][][] getFPN(){
return this.image_fpn;
}
/**
 * Associate FPN with this scene without applying it (apply separately with applyFPN()).
 * @param fpn [channel][color][pixel], shared between scenes of a series; may be null
 */
public void setFPN (double [][][] fpn) {
this.image_fpn = fpn;
}
/**
 * Store FPN in this (virtual/center) scene and propagate+apply it to all scenes of a series.
 * setImageData(fpn) makes the FPN itself this scene's image data (and resets this scene's own
 * FPN state); setFPN(fpn) then re-associates the FPN here. Each non-null scene in quadCLTs
 * gets the same (shared) FPN array and has it applied (subtracted) immediately.
 * NOTE(review): with fpn == null this also nulls this scene's image_data — presumably
 * intentional for the reset path; confirm callers expect that.
 * @param quadCLTs scene sequence (null entries skipped)
 * @param fpn      [channel][color][pixel]; can be null to reset
 */
public void setApplyFPN(
QuadCLT [] quadCLTs,
double [][][] fpn) { // can be null to reset
setImageData(fpn);
setFPN(fpn);
for (int nscene = 0; nscene < quadCLTs.length; nscene++) if (quadCLTs[nscene] != null){
quadCLTs[nscene].setFPN(fpn);
quadCLTs[nscene].applyFPN();
}
return;
}
/**
 * Apply the FPN previously associated with this scene (see setFPN()) to image_data.
 * No-op if that FPN is already applied.
 */
public void applyFPN() {
applyFPN(this.image_fpn);
}
/**
 * Switch image_data to a new FPN: un-apply (add back) the previously applied FPN, if any,
 * then subtract the new one, multithreaded per channel. Idempotent — if the same FPN
 * instance is already applied (reference comparison), nothing is done. Pass null to only
 * restore the original (FPN-free) image data.
 * Per-channel/per-color FPN slices may be null (e.g. produced by readImageFPN() with a
 * partial sensor mask) — such slices are skipped instead of throwing NPE.
 * Marks image data as new on any change.
 * @param fpn [channel][color][pixel] to subtract, or null to just un-apply the old one
 */
public void applyFPN(
double [][][] fpn) {
if (image_fpn_applied != fpn) {
if (image_data == null) {
System.out.println("applyFPN (): image_data==null");
return;
}
final Thread[] threads = ImageDtt.newThreadArray(ImageDtt.THREADS_MAX);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nChn = ai.getAndIncrement(); nChn < image_data.length; nChn = ai.getAndIncrement()) {
// Hoisted null/bounds guards: FPN channels may be missing (null) or arrays shorter than image_data
double [][] fpn_old = ((image_fpn_applied != null) && (nChn < image_fpn_applied.length)) ? image_fpn_applied[nChn] : null;
double [][] fpn_new = ((fpn != null) && (nChn < fpn.length)) ? fpn[nChn] : null;
for (int ncol = 0; ncol < image_data[nChn].length; ncol++) {
double [] old_slice = ((fpn_old != null) && (ncol < fpn_old.length)) ? fpn_old[ncol] : null;
double [] new_slice = ((fpn_new != null) && (ncol < fpn_new.length)) ? fpn_new[ncol] : null;
for (int npix = 0; npix < image_data[nChn][ncol].length; npix++) {
if (old_slice != null) {
image_data[nChn][ncol][npix] += old_slice[npix]; // un-apply previous FPN
}
if (new_slice != null) {
image_data[nChn][ncol][npix] -= new_slice[npix]; // apply new FPN
}
}
}
}
}
};
}
ImageDtt.startAndJoin(threads);
image_fpn_applied = fpn;
this.new_image_data = true;
}
return;
}
/**
 * Render FPN data as an ImagePlus hyperstack, optionally display it, and optionally save it
 * to the model directory under the CENTER_FPN_SUFFIX name.
 * @param fpn   [sensor][color][pixel] FPN data
 * @param width image width in pixels
 * @param save  save rendered image to the model directory
 * @param show  display the rendered image
 * @return the rendered ImagePlus (null if rendering failed)
 */
public ImagePlus saveShowFPN(
double [][][] fpn,
int width,
boolean save,
boolean show) {
final ImagePlus fpn_imp = showFPN(fpn, width, show);
if (!save || (fpn_imp == null)) {
return fpn_imp; // nothing to persist
}
saveImagePlusInModelDirectory(
CENTER_FPN_SUFFIX, // suffix (null would use the imp title)
fpn_imp);
return fpn_imp;
}
/**
 * Build an ImagePlus hyperstack from FPN data: frames are colors ("Color-N"), slices are
 * sensors ("Sens-N"). Sensors with null FPN entries are tolerated (their slices stay empty).
 * @param fpn   [sensor][color][pixel]; per-sensor entries may be null
 * @param width image width in pixels
 * @param show  display the image
 * @return the hyperstack, or null if no sensor has FPN data (was NegativeArraySizeException)
 */
public static ImagePlus showFPN(
double [][][] fpn,
int width,
boolean show) {
int num_sens = fpn.length;
int num_colors = -1; // from the first non-null sensor
for (int i = 0; i < num_sens; i++) {
if (fpn[i] != null) {
num_colors = fpn[i].length;
break;
}
}
if (num_colors < 0) { // all sensor entries are null - nothing to show
return null;
}
String [] titles = new String[num_sens];
String [] top_titles = new String[num_colors];
for (int ncol = 0; ncol < num_colors;ncol++) {
top_titles[ncol] = "Color-"+ncol;
}
// transpose [sensor][color] -> [color][sensor] as showArraysHyperstack expects
double [][][] img_data = new double [num_colors][num_sens][];
for (int nsens = 0; nsens < num_sens; nsens++) if (fpn[nsens] != null) {
titles[nsens]= "Sens-"+nsens;
for (int ncol = 0; ncol < num_colors;ncol++) {
img_data [ncol][nsens] = fpn[nsens][ncol];
}
}
String fpn_title = "FPN_data";
ImagePlus imp = ShowDoubleFloatArrays.showArraysHyperstack(
img_data, // double[][][] pixels,
width, // int width,
fpn_title, // String title,
titles, // String [] titles, // all slices*frames titles or just slice titles or null
top_titles, // String [] frame_titles, // frame titles or null
show); // boolean show)
return imp;
}
/**
 * Read saved FPN data from the model directory and unpack it into [sensor][color][pixel],
 * keeping only sensors selected by sens_mask. Color arrays of unselected sensors remain
 * null (callers must tolerate null slices).
 * Saved layout is assumed to be slice index = ncol * used_sensors + isens — TODO confirm
 * against saveShowFPN()/showFPN() slice ordering.
 * @param sens_mask bitmask of sensors to read (-1 - all)
 * @return FPN as [sensor][color][pixel], or null if no saved data / no sensors selected /
 *         slice count inconsistent with the mask
 */
public double [][][] readImageFPN (
int sens_mask){
int [] wh = new int [2];
double [][] fpn_data2 = readDoubleArrayFromModelDirectory(
CENTER_FPN_SUFFIX, // String suffix,
0, // int num_slices, // (0 - all)
wh); // int [] wh)
if (fpn_data2 == null) {
return null;
}
int num_sensors = getNumSensors();
int used_sensors = 0;
for (int nsens = 0; nsens < num_sensors; nsens++) if ( (sens_mask & (1 << nsens)) != 0) {
used_sensors++;
}
if (used_sensors == 0) { // was: division by zero below
System.out.println("readImageFPN(): sens_mask selects no sensors");
return null;
}
if ((fpn_data2.length % used_sensors) != 0) { // saved data inconsistent with the mask
System.out.println("readImageFPN(): number of saved slices ("+fpn_data2.length+
") is not a multiple of selected sensors ("+used_sensors+")");
return null;
}
int num_colors = fpn_data2.length/used_sensors;
double [][][] fpn = new double [num_sensors][num_colors][];
int isens = 0;
for (int nsens = 0; nsens < num_sensors; nsens++) if ( (sens_mask & (1 << nsens)) != 0) {
for (int ncol = 0; ncol < num_colors; ncol++) {
fpn[nsens][ncol] = fpn_data2[ncol * used_sensors + isens];
}
isens++;
}
return fpn;
}
/**
 * Calculate sensors' FPN in a simple way: average each channel/color over a scene sequence.
 * Intended to be used in CUAS mode, averaging several (normally 2) full periods of rotation.
 * TODO: make more accurate and universal by back-propagating the corrected image and subtracting per-sensor
 * versions before averaging.
 * @param quadCLTs sequence of scenes with conditioned images (getImageData() != null), without FPN applied
 * @param range a first_last scene index pair. May be adjusted to include an integer number of rotations.
 * @param sensor_mask bitmask which sensors to process (normally -1 - all)
 * @param debugLevel debug level
 * @return [sensor][color][pixel] average FPN image. May be saved as image_data to a virtual (center) scene
 * (a QuadCLT instance). Channels excluded by sensor_mask remain zero-filled (not null).
 */
public static double [][][] calculateFPN(
final QuadCLT [] quadCLTs,
final int [] range, // required
final int sensor_mask,
final int debugLevel){
// dimensions are taken from the first scene in range - all scenes assumed identical
QuadCLT first_scene = quadCLTs[range[0]];
final int num_sens = first_scene.getNumSensors();
final int num_colors = first_scene.getNumColors();
final int width = first_scene.getTilesX()*first_scene.getTileSize();
final int height = first_scene.getTilesY()*first_scene.getTileSize();
final int num_pix = width*height;
final int num_scenes = range[1]-range[0]+1;
final double [][][] fpn = new double [num_sens][num_colors][num_pix];
final Thread[] threads = ImageDtt.newThreadArray();
final AtomicInteger ai = new AtomicInteger(0);
// parallelized by sensor channel: each thread accumulates then scales its own channels
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nChn = ai.getAndIncrement(); nChn < num_sens; nChn = ai.getAndIncrement()) if (((sensor_mask >> nChn) & 1) != 0) {
for (int nscene = range[0]; nscene <= range[1]; nscene++) {
for (int ncol = 0; ncol < num_colors; ncol++) {
double [] img_slice = quadCLTs[nscene].getImageData()[nChn][ncol];
for (int npix = 0; npix < num_pix; npix++) {
fpn[nChn][ncol][npix] += img_slice[npix];
}
}
}
double scale = 1.0/num_scenes; // sum -> average
for (int ncol = 0; ncol < num_colors; ncol++) {
for (int npix = 0; npix < num_pix; npix++) {
fpn[nChn][ncol][npix] *= scale;
}
}
}
}
};
}
ImageDtt.startAndJoin(threads);
return fpn;
}
public ImagePlus debugFPN(
		QuadCLT [] quadCLTs,
		double [][][] fpn,
		int [] range,
		int nsens,
		boolean show) {
	// Build a single-sensor FPN debug hyperstack, save it to the model
	// directory, and optionally display it. Returns null when the static
	// helper produced no image.
	int fpn_width = getTilesX() * getTileSize(); // see if center_CLT can be used
	String dbg_title = getImageName() + "-DEBUG_FPN_SENS_" + nsens;
	ImagePlus imp = QuadCLT.debugFPN(
			quadCLTs,   // final QuadCLT [] quadCLTs,
			fpn,        // final double [][][] fpn,
			range,      // final int [] range, // required
			nsens,      // final int nsens,
			fpn_width,  // final int width,
			dbg_title); // final String title)
	if (imp == null) {
		return null;
	}
	saveImagePlusInModelDirectory(
			dbg_title, // String suffix, // null - use title from the imp
			imp);      // ImagePlus imp)
	if (show) {
		imp.show();
	}
	return imp;
}
public static ImagePlus debugFPN(
		final QuadCLT [] quadCLTs,
		final double [][][] fpn,
		final int [] range, // required
		final int nsens,
		final int width,
		final String title){
	// Assemble a two-frame hyperstack for one sensor: the raw image slices
	// ("src") and the same slices with the FPN subtracted ("src-fpn"),
	// one slice per scene in the range. The image is built but not shown.
	final int ncol = 0; // only the first color channel is visualized
	final int num_scenes = range[1] - range[0] + 1;
	final int num_pix = quadCLTs[range[0]].getImageData()[nsens][ncol].length;
	final double [][][] data = new double [2][num_scenes][num_pix];
	final String [] titles_top = {"src", "src-fpn"};
	final String [] titles = new String [num_scenes];
	final double [] fpn_slice = fpn[nsens][ncol];
	for (int iscene = 0; iscene < num_scenes; iscene++) {
		int nscene = iscene + range[0];
		titles[iscene] = quadCLTs[nscene].getImageName();
		double [] src = quadCLTs[nscene].getImageData()[nsens][ncol];
		data[0][iscene] = src.clone();
		double [] diff = src.clone();
		for (int i = 0; i < num_pix; i++) {
			diff[i] -= fpn_slice[i];
		}
		data[1][iscene] = diff;
	}
	return ShowDoubleFloatArrays.showArraysHyperstack(
			data,       // double[][][] pixels,
			width,      // int width,
			title,      // String title, "time_derivs-rt"+diff_time_rt+"-rxy"+diff_time_rxy,
			titles,     // String [] titles, // all slices*frames titles or just slice titles or null
			titles_top, // String [] frame_titles, // frame titles or null
			false);     // boolean show)
}
public void processCLTQuadCorrs( // not used in lwir
CLTParameters clt_parameters,
......@@ -9893,7 +10153,7 @@ public class QuadCLTCPU {
ColorProcParameters colorProcParameters,
CorrectionColorProc.ColorGainsParameters channelGainParameters,
EyesisCorrectionParameters.RGBParameters rgbParameters,
// int convolveFFTSize, // 128 - fft size, kernel size should be size/2
// int convolveFFTSize, // 128 - fft size, kernel size should be size/2
final boolean apply_corr, // calculate and apply additional fine geometry correction
final boolean infinity_corr, // calculate and apply geometry correction at infinity
final int threadsMax, // maximal number of threads to launch
......@@ -9917,7 +10177,7 @@ public class QuadCLTCPU {
}
// multiply each image by this and divide by individual (if not NaN)
double [] referenceExposures = null;
// if (!colorProcParameters.lwir_islwir) {
// if (!colorProcParameters.lwir_islwir) {
if (!isLwir()) {
referenceExposures=eyesisCorrections.calcReferenceExposures(debugLevel);
}
......@@ -9926,7 +10186,7 @@ public class QuadCLTCPU {
boolean [][] saturation_imp = (clt_parameters.sat_level > 0.0)? new boolean[channelFiles.length][] : null;
double [] scaleExposures = new double[channelFiles.length];
// ImagePlus [] imp_srcs =
// ImagePlus [] imp_srcs =
conditionImageSet(
clt_parameters, // EyesisCorrectionParameters.CLTParameters clt_parameters,
colorProcParameters,
......@@ -9942,7 +10202,7 @@ public class QuadCLTCPU {
// once per quad here
processCLTQuadCorrCPU( // returns ImagePlus, but it already should be saved/shown
// imp_srcs, // [srcChannel], // should have properties "name"(base for saving results), "channel","path"
// imp_srcs, // [srcChannel], // should have properties "name"(base for saving results), "channel","path"
saturation_imp, // boolean [][] saturation_imp, // (near) saturated pixels or null
clt_parameters,
debayerParameters,
......@@ -9976,7 +10236,7 @@ public class QuadCLTCPU {
ColorProcParameters colorProcParameters,
CorrectionColorProc.ColorGainsParameters channelGainParameters,
EyesisCorrectionParameters.RGBParameters rgbParameters,
// int convolveFFTSize, // 128 - fft size, kernel size should be size/2
// int convolveFFTSize, // 128 - fft size, kernel size should be size/2
final boolean apply_corr, // calculate and apply additional fine geometry correction
final boolean infinity_corr, // calculate and apply geometry correction at infinity
final int threadsMax, // maximal number of threads to launch
......@@ -10000,7 +10260,7 @@ public class QuadCLTCPU {
}
// multiply each image by this and divide by individual (if not NaN)
double [] referenceExposures = null;
// if (!colorProcParameters.lwir_islwir) {
// if (!colorProcParameters.lwir_islwir) {
if (!isLwir()) {
referenceExposures=eyesisCorrections.calcReferenceExposures(debugLevel);
}
......@@ -10057,7 +10317,7 @@ public class QuadCLTCPU {
ColorProcParameters colorProcParameters,
CorrectionColorProc.ColorGainsParameters channelGainParameters,
EyesisCorrectionParameters.RGBParameters rgbParameters,
// int convolveFFTSize, // 128 - fft size, kernel size should be size/2
// int convolveFFTSize, // 128 - fft size, kernel size should be size/2
final boolean apply_corr, // calculate and apply additional fine geometry correction
final boolean infinity_corr, // calculate and apply geometry correction at infinity
final int threadsMax, // maximal number of threads to launch
......@@ -10081,7 +10341,7 @@ public class QuadCLTCPU {
}
// multiply each image by this and divide by individual (if not NaN)
double [] referenceExposures = null;
// if (!colorProcParameters.lwir_islwir) {
// if (!colorProcParameters.lwir_islwir) {
if (!isLwir()) {
referenceExposures=eyesisCorrections.calcReferenceExposures(debugLevel);
}
......@@ -10179,7 +10439,7 @@ public class QuadCLTCPU {
int nFile=channelFiles[srcChannel];
if (nFile >=0){
for (int i = 0; i < avr_pix[srcChannel].length; i++) avr_pix[srcChannel][i] = 0;
// int [] num_nonsat = {0,0,0};
// int [] num_nonsat = {0,0,0};
float [] pixels=(float []) imp_srcs[srcChannel].getProcessor().getPixels();
int width = imp_srcs[srcChannel].getWidth();
int height = imp_srcs[srcChannel].getHeight();
......@@ -10449,7 +10709,6 @@ public class QuadCLTCPU {
};
}
ImageDtt.startAndJoin(threads);
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment