Commit 7d6fb681 authored by Andrey Filippov's avatar Andrey Filippov

First version of MB correction, changing format

parent e4c6d901
...@@ -3719,7 +3719,6 @@ public class GpuQuad{ // quad camera description
//change to fixed 511?
final int task_code = ((1 << num_pairs)-1) << GPUTileProcessor.TASK_CORR_BITS; // correlation only
final double min_px = margin;
// final double max_px = img_width - 1 - margin;
final double max_px = geometryCorrection.getSensorWH()[0] - 1 - margin; // sensor width here, not window width
final double [] min_py = new double[num_cams] ;
final double [] max_py = new double[num_cams] ;
...@@ -3727,7 +3726,6 @@ public class GpuQuad{ // quad camera description
min_py [i] = margin + (calcPortsCoordinatesAndDerivatives? geometryCorrection.getWOITops()[i] : 0);
// camera_heights array is only set during conditionImageSet(), not called by the intersceneAccumulate()
// That was correct, as all scenes should be conditioned
// max_py [i] = geometryCorrection.getWOITops()[i] + geometryCorrection.getCameraHeights()[i] - 1 - margin;
max_py [i] = geometryCorrection.getSensorWH()[1] - 1 - margin; // same for all channels?
//.getSensorWH()[0]
}
...@@ -3807,11 +3805,167 @@ public class GpuQuad{ // quad camera description
public static TpTask[][] setInterTasksMotionBlur(
final int num_cams,
final int img_width, // should match pXpYD
final boolean calcPortsCoordinatesAndDerivatives, // GPU can calculate them centreXY
final double [][] pXpYD, // per-tile array of pX,pY,disparity triplets (or nulls)
final boolean [] selection, // may be null, if not null do not process unselected tiles
// motion blur compensation
final double mb_tau, // 0.008; // time constant, sec
final double mb_max_gain, // 5.0; // motion blur maximal gain (if more - move second point more than a pixel
final double [][] mb_vectors, //
final GeometryCorrection geometryCorrection,
final double disparity_corr,
final int margin, // do not use tiles if their centers are closer to the edges
final boolean [] valid_tiles,
final int threadsMax) // maximal number of threads to launch
{
int num_pairs = Correlation2d.getNumPairs(num_cams);
//change to fixed 511?
final int task_code = ((1 << num_pairs)-1) << GPUTileProcessor.TASK_CORR_BITS; // correlation only
final double min_px = margin;
final double max_px = geometryCorrection.getSensorWH()[0] - 1 - margin; // sensor width here, not window width
final double [] min_py = new double[num_cams] ;
final double [] max_py = new double[num_cams] ;
for (int i = 0; i < num_cams; i++) {
min_py [i] = margin + (calcPortsCoordinatesAndDerivatives? geometryCorrection.getWOITops()[i] : 0);
// camera_heights array is only set during conditionImageSet(), not called by the intersceneAccumulate()
// That was correct, as all scenes should be conditioned
max_py [i] = geometryCorrection.getSensorWH()[1] - 1 - margin; // same for all channels?
//.getSensorWH()[0]
}
if (valid_tiles!=null) {
Arrays.fill(valid_tiles, false);
}
final int tilesX = img_width / GPUTileProcessor.DTT_SIZE;
final int tiles = pXpYD.length;
final Matrix [] corr_rots = geometryCorrection.getCorrVector().getRotMatrices(); // get array of per-sensor rotation matrices
final int quad_main = (geometryCorrection != null)? num_cams:0;
final Thread[] threads = ImageDtt.newThreadArray(threadsMax);
final AtomicInteger ai = new AtomicInteger(0);
final AtomicInteger aTiles = new AtomicInteger(0);
final TpTask[][] tp_tasks = new TpTask[2][tiles]; // aTiles.get()]; // [0] - main, [1] - shifted
final double mb_len_scale = -Math.log(1.0 - 1.0/mb_max_gain);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
@Override
public void run() {
for (int nTile = ai.getAndIncrement(); nTile < tiles; nTile = ai.getAndIncrement())
if ((pXpYD[nTile] != null) && (mb_vectors[nTile] != null) && ((selection == null) || selection[nTile])) {
int tileY = nTile / tilesX;
int tileX = nTile % tilesX;
TpTask tp_task = new TpTask(num_cams, tileX, tileY);
TpTask tp_task_sub = new TpTask(num_cams, tileX, tileY);
tp_task.task = task_code;
tp_task_sub.task = task_code;
double disparity = pXpYD[nTile][2] + disparity_corr;
tp_task.target_disparity = (float) disparity; // will it be used?
tp_task_sub.target_disparity = tp_task.target_disparity; // will it be used?
double [] centerXY = pXpYD[nTile];
tp_task.setCenterXY(centerXY); // this pair of coordinates will be used by GPU to set tp_task.xy and task.disp_dist!
// calculate offset and weight for the secondary tile
double dx = mb_vectors[nTile][0];
double dy = mb_vectors[nTile][1];
double mb_len = Math.sqrt(dx*dx+dy*dy); // in pixels/s
dx /= mb_len; // unit vector
dy /= mb_len;
mb_len *= mb_tau; // now in pixels
double mb_offs = 1.0; // try 1 pixel. Maybe adjust for non-ortho, e.g. sqrt(2) for diagonal?
double min_offs = mb_len_scale * mb_len;
if (mb_offs < min_offs) {
mb_offs = min_offs;
}
dx *= mb_offs;
dy *= mb_offs;
double [] centerXY_sub = {centerXY[0]+dx,centerXY[1]+dy};
tp_task_sub.setCenterXY(centerXY_sub);
double exp_offs = Math.exp(-mb_offs/mb_len);
double gain = 1.0/(1.0 - exp_offs);
double gain_sub = -gain * exp_offs;
tp_task.setScale(gain);
tp_task_sub.setScale(gain_sub);
boolean bad_margins = false;
if (calcPortsCoordinatesAndDerivatives) { // for non-GPU?
double [][] disp_dist = new double[quad_main][]; // used to correct 3D correlations (not yet used here)
double [][] centersXY_main = geometryCorrection.getPortsCoordinatesAndDerivatives(
geometryCorrection, // GeometryCorrection gc_main,
false, // boolean use_rig_offsets,
corr_rots, // Matrix [] rots,
null, // Matrix [][] deriv_rots,
null, // double [][] pXYderiv, // if not null, should be double[8][]
disp_dist, // used to correct 3D correlations
centerXY[0],
centerXY[1],
disparity); // + disparity_corr);
tp_task.setDispDist(disp_dist);
tp_task.xy = new float [centersXY_main.length][2];
for (int i = 0; i < centersXY_main.length; i++) {
if ( (centersXY_main[i][0] < min_px) || (centersXY_main[i][0] > max_px) ||
(centersXY_main[i][1] < min_py[i]) || (centersXY_main[i][1] > max_py[i])) {
bad_margins = true;
break;
}
tp_task.xy[i][0] = (float) centersXY_main[i][0];
tp_task.xy[i][1] = (float) centersXY_main[i][1];
}
// same for the second entry
double [][] disp_dist_sub = new double[quad_main][]; // used to correct 3D correlations (not yet used here)
double [][] centersXY_main_sub = geometryCorrection.getPortsCoordinatesAndDerivatives(
geometryCorrection, // GeometryCorrection gc_main,
false, // boolean use_rig_offsets,
corr_rots, // Matrix [] rots,
null, // Matrix [][] deriv_rots,
null, // double [][] pXYderiv, // if not null, should be double[8][]
disp_dist_sub, // used to correct 3D correlations
centerXY_sub[0],
centerXY_sub[1],
disparity); // + disparity_corr);
tp_task_sub.setDispDist(disp_dist_sub); // use the sub-tile disp_dist, not the main one
tp_task_sub.xy = new float [centersXY_main_sub.length][2];
for (int i = 0; i < centersXY_main_sub.length; i++) {
if ( (centersXY_main_sub[i][0] < min_px) || (centersXY_main_sub[i][0] > max_px) ||
(centersXY_main_sub[i][1] < min_py[i]) || (centersXY_main_sub[i][1] > max_py[i])) {
bad_margins = true;
break;
}
tp_task_sub.xy[i][0] = (float) centersXY_main_sub[i][0];
tp_task_sub.xy[i][1] = (float) centersXY_main_sub[i][1];
}
} else { // only check center for margins
if ( (centerXY[0] < min_px) || (centerXY[0] > max_px) ||
(centerXY[1] < min_py[0]) || (centerXY[1] > max_py[0]) ||
(centerXY_sub[0] < min_px) || (centerXY_sub[0] > max_px) ||
(centerXY_sub[1] < min_py[0]) || (centerXY_sub[1] > max_py[0])) {
bad_margins = true;
// break;
}
}
if (bad_margins) {
continue;
}
int tp_task_index = aTiles.getAndIncrement();
tp_tasks[0][tp_task_index] = tp_task;
tp_tasks[1][tp_task_index] = tp_task_sub;
if (valid_tiles!=null) {
valid_tiles[nTile] = true;
}
}
}
};
}
ImageDtt.startAndJoin(threads);
final TpTask[][] tp_tasks_out = new TpTask[2][aTiles.get()];
System.arraycopy(tp_tasks[0], 0, tp_tasks_out[0], 0, tp_tasks_out[0].length);
System.arraycopy(tp_tasks[1], 0, tp_tasks_out[1], 0, tp_tasks_out[1].length);
return tp_tasks_out;
}
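For reference, the weighting computed above follows from an exponential sensor decay: with blur length L = |mb_vector|*mb_tau pixels and secondary-tile offset d = mb_offs, the primary tile gets gain = 1/(1 - exp(-d/L)) and the offset tile gets gain_sub = -gain*exp(-d/L); the offset is kept at or above -ln(1 - 1/mb_max_gain)*L so that gain never exceeds mb_max_gain, and the two weights always sum to 1. A minimal standalone sketch (hypothetical demo class, not part of this commit) that reproduces these numbers:

public class MotionBlurWeightsDemo {
	public static void main(String[] args) {
		double mb_tau = 0.008;             // sensor time constant, s (default used above)
		double mb_max_gain = 5.0;          // maximal allowed gain (default used above)
		double[] mb_vector = {80.0, 60.0}; // example per-tile blur vector, pixels/s
		double mb_len = Math.hypot(mb_vector[0], mb_vector[1]) * mb_tau; // blur length, pixels
		double mb_len_scale = -Math.log(1.0 - 1.0 / mb_max_gain);
		double mb_offs = Math.max(1.0, mb_len_scale * mb_len); // secondary tile offset, pixels
		double exp_offs = Math.exp(-mb_offs / mb_len);
		double gain     =  1.0 / (1.0 - exp_offs); // primary tile weight (scale > 0: multiply and set)
		double gain_sub = -gain * exp_offs;        // offset tile weight (scale < 0: multiply and accumulate)
		System.out.println("L = " + mb_len + " px, d = " + mb_offs + " px");
		System.out.println("gain = " + gain + ", gain_sub = " + gain_sub + ", sum = " + (gain + gain_sub));
	}
}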
public void setLpfRbg(
float [][] lpf_rbg, // 4 64-el. arrays: r,b,g,m
......
...@@ -11,13 +11,28 @@ public class TpTask {
public float[][] xy_aux = null;
public float [][] disp_dist = null;
// public float weight;
public float scale = 0.0f; // for motion blur correction
// 0.0 - set (as it was). >0 multiply and set. <0 multiply and accumulate
public static int getSize(int num_sensors) {
return 5 + 2* num_sensors + 4 * num_sensors;
// return 5 + 2* num_sensors + 4 * num_sensors;
return 6 + 2* num_sensors + 4 * num_sensors; // added scale
}
public int getSize() {
return 5 + 2* num_sensors + 4 * num_sensors;
// return 5 + 2* num_sensors + 4 * num_sensors;
return getSize(num_sensors);
}
public void setScale(float scale) {
this.scale = scale;
}
public void setScale(double scale) {
this.scale = (float) scale;
}
public float getScale() {
return scale;
}
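The format change adds one float per task: scale is serialized at index 5 of the per-task record (see the serialization code below), so the fixed header grows from 5 to 6 floats ahead of the per-sensor data. A quick size check (illustrative only; the sensor count here is an assumed example):

public class TpTaskSizeCheck {
	public static void main(String[] args) {
		int num_sensors = 16; // assumed sensor count for the example
		int size = 6 + 2 * num_sensors + 4 * num_sensors; // same formula as TpTask.getSize(num_sensors)
		System.out.println(size + " floats per task (one more than before scale was added)");
	}
}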
public TpTask(
int num_sensors,
...@@ -54,6 +69,7 @@ public class TpTask {
target_disparity = flt[indx++]; // 2
centerXY[0] = flt[indx++]; // 3
centerXY[1] = flt[indx++]; // 4
scale = flt[indx++]; // 5
if (use_aux) {
xy_aux = new float[num_sensors][2];
for (int i = 0; i < num_sensors; i++) {
...@@ -165,7 +181,7 @@ public class TpTask {
flt[indx++] = this.target_disparity; // 2
flt[indx++] = centerXY[0]; // 3
flt[indx++] = centerXY[1]; // 4
flt[indx++] = scale; // 5
float [][] offsets = use_aux? this.xy_aux: this.xy;
for (int i = 0; i < num_sensors; i++) {
if (offsets != null) {
......
...@@ -1199,12 +1199,56 @@ public class ImageDtt extends ImageDttCPU {
gpuQuad.updateTasks(
tp_tasks,
false); // boolean use_aux // while is it in class member? - just to be able to free
// Skipping if ((fdisp_dist != null) || (fpxpy != null)) {...
// int [] wh = null;
// int erase_clt = 1; // NaN;
gpuQuad.execConvertDirect(use_reference_buffer, wh, erase_clt); // put results into a "reference" buffer
}
public void setReferenceTDMotionBlur(
final int erase_clt,
final int [] wh, // null (use sensor dimensions) or pair {width, height} in pixels
final ImageDttParameters imgdtt_params, // Now just extra correlation parameters, later will include, most others
final boolean use_reference_buffer,
final TpTask[][] tp_tasks,
final double gpu_sigma_r, // 0.9, 1.1
final double gpu_sigma_b, // 0.9, 1.1
final double gpu_sigma_g, // 0.6, 0.7
final double gpu_sigma_m, // = 0.4; // 0.7;
final int threadsMax, // maximal number of threads to launch
final int globalDebugLevel)
{
final float [][] lpf_rgb = new float[][] {
floatGetCltLpfFd(gpu_sigma_r),
floatGetCltLpfFd(gpu_sigma_b),
floatGetCltLpfFd(gpu_sigma_g),
floatGetCltLpfFd(gpu_sigma_m)
};
gpuQuad.setLpfRbg( // constants memory - same for all cameras
lpf_rgb,
globalDebugLevel > 2);
gpuQuad.setTasks( // copy tp_tasks to the GPU memory
tp_tasks[0], // TpTask [] tile_tasks,
false, // use_aux); // boolean use_aux)
imgdtt_params.gpu_verify); // boolean verify
// Why always NON-UNIFORM grid? Already set in tp_tasks
gpuQuad.execSetTilesOffsets(false); // false); // prepare tiles offsets in GPU memory, using NON-UNIFORM grid (pre-calculated)
// update tp_tasks
gpuQuad.updateTasks(
tp_tasks[0],
false); // boolean use_aux // while is it in class member? - just to be able to free
gpuQuad.execConvertDirect(use_reference_buffer, wh, erase_clt); // put results into a "reference" buffer
// second tasks (subtracting MB)
gpuQuad.setTasks( // copy tp_tasks to the GPU memory
tp_tasks[1], // TpTask [] tile_tasks,
false, // use_aux); // boolean use_aux)
imgdtt_params.gpu_verify); // boolean verify
// Why always NON-UNIFORM grid? Already set in tp_tasks
gpuQuad.execSetTilesOffsets(false); // false); // prepare tiles offsets in GPU memory, using NON-UNIFORM grid (pre-calculated)
// update tp_tasks
gpuQuad.updateTasks(
tp_tasks[1],
false); // boolean use_aux // while is it in class member? - just to be able to free
gpuQuad.execConvertDirect(use_reference_buffer, wh, -1); // erase_clt); // put results into a "reference" buffer
}
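setReferenceTDMotionBlur() above runs the convert step twice: the first execConvertDirect() pass writes the primary tiles scaled by their positive TpTask.scale, and the second pass is called with erase_clt = -1 and the secondary (offset) tiles carrying a negative scale, so their contribution is accumulated instead of overwriting the buffer (per the scale convention noted in TpTask above). A CPU-side sketch of what one reference tile then holds (hypothetical helper, assuming that scale convention; not part of the GPU code):

// td_main: transform-domain tile at the nominal center; td_offset: tile shifted by mb_offs pixels
// gain (> 0) and gain_sub (< 0) are the TpTask.scale values set in setInterTasksMotionBlur()
static float[] accumulateMotionBlurTD(float[] td_main, float[] td_offset, float gain, float gain_sub) {
	float[] td_ref = new float[td_main.length];
	for (int i = 0; i < td_ref.length; i++) {
		td_ref[i]  = gain * td_main[i];        // first pass: multiply and set
		td_ref[i] += gain_sub * td_offset[i];  // second pass: multiply and accumulate
	}
	return td_ref;
}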
......
...@@ -43,6 +43,12 @@ public class IntersceneLma {
this.thread_invariant = thread_invariant;
this.opticalFlow = opticalFlow;
}
public double [][] getLastJT(){
return last_jt;
}
public double[] getLastRms() {
return last_rms;
}
...@@ -171,19 +177,9 @@ public class IntersceneLma {
final int debug_level)
{
scenesCLT = new QuadCLT [] {reference_QuadClt, scene_QuadClt};
// this.vector_XYS = vector_XYS;
par_mask = param_select;
macrotile_centers = centers;
num_samples = 2 * centers.length;
/*
for (int i = 0; i < vector_XYS.length; i++){
if (((vector_XYS[i] == null) && (centers[i]!=null)) ||
((vector_XYS[i] != null) && (centers[i]==null))) {
vector_XYS[i] = null;
centers[i]= null;
}
}
*/
ErsCorrection ers_ref = reference_QuadClt.getErsCorrection();
ErsCorrection ers_scene = scene_QuadClt.getErsCorrection();
final double [] scene_xyz = (scene_xyz0 != null) ? scene_xyz0 : ers_scene.camera_xyz;
...@@ -201,19 +197,22 @@ public class IntersceneLma {
scene_atr[0], scene_atr[1], scene_atr[2],
scene_xyz[0], scene_xyz[1], scene_xyz[2]};
parameters_full = full_parameters_vector.clone();
if (first_run || (backup_parameters_full == null)) {
if ((vector_XYS != null) && (first_run || (backup_parameters_full == null))) {
backup_parameters_full = full_parameters_vector.clone();
}
int num_pars = 0;
for (int i = 0; i < par_mask.length; i++) if (par_mask[i]) num_pars++;
par_indices = new int [num_pars];
num_pars = 00;
num_pars = 0;
for (int i = 0; i < par_mask.length; i++) if (par_mask[i]) par_indices[num_pars++] = i;
parameters_vector = new double [par_indices.length];
for (int i = 0; i < par_indices.length; i++) parameters_vector[i] = full_parameters_vector[par_indices[i]];
// parameters_initial = parameters_vector.clone();
if (vector_XYS != null) {// skip when used for the motion blur vectors, not LMA
setSamplesWeights(vector_XYS); // not regularized yet !
} else {
weights = null; // new double[2 * centers.length];
}
last_jt = new double [parameters_vector.length][];
if (debug_level > 1) {
...@@ -225,6 +224,10 @@ public class IntersceneLma {
scenesCLT[1], // final QuadCLT scene_QuadClt,
scenesCLT[0], // final QuadCLT reference_QuadClt,
debug_level); // final int debug_level)
if (vector_XYS == null) {
return; // for MB vectors (noLMA)
}
double [][] wjtj = getWJtJlambda( // USED in lwir all NAN
0.0, // final double lambda,
last_jt); // final double [][] jt) all 0???
...@@ -727,10 +730,12 @@ public class IntersceneLma {
final double [] scene_atr = new double[3];
final double [] reference_xyz = new double[3]; // will stay 0
final double [] reference_atr = new double[3]; // will stay 0
final double [] fx = new double [weights.length];
final boolean mb_mode = (weights == null);
final int weights_length = mb_mode ? (2 * macrotile_centers.length) : weights.length;
final double [] fx = mb_mode ? null : (new double [weights_length]); // weights.length]; : weights.length :
if (jt != null) {
for (int i = 0; i < jt.length; i++) {
jt[i] = new double [weights.length];
jt[i] = new double [weights_length]; // weights.length];
}
}
...@@ -758,14 +763,13 @@ public class IntersceneLma {
scene_atr, // double [] atr);
false)[0]; // boolean invert));
// double [][][] derivs = new double [macrotile_centers.length + parameters_vector.length][][];
final Thread[] threads = ImageDtt.newThreadArray(opticalFlow.threadsMax);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int iMTile = ai.getAndIncrement(); iMTile < macrotile_centers.length; iMTile = ai.getAndIncrement()) {
if ((macrotile_centers[iMTile]!=null) &&(weights[2*iMTile] > 0.0)){ // was: weights[iMTile]?
if ((macrotile_centers[iMTile]!=null) && (mb_mode || (weights[2*iMTile] > 0.0))){ // was: weights[iMTile]?
//infinity_disparity
boolean is_infinity = macrotile_centers[iMTile][2] < infinity_disparity;
double [][] deriv_params = ers_ref.getDPxSceneDParameters(
...@@ -782,8 +786,10 @@ public class IntersceneLma {
if (deriv_params!= null) {
boolean bad_tile = false;
if (!bad_tile) {
fx[2 * iMTile + 0] = deriv_params[0][0]; // pX
fx[2 * iMTile + 1] = deriv_params[0][1]; // pY
if (!mb_mode) {
fx[2 * iMTile + 0] = deriv_params[0][0]; // pX
fx[2 * iMTile + 1] = deriv_params[0][1]; // pY
}
if (jt != null) {
for (int i = 0; i < par_indices.length; i++) {
int indx = par_indices[i] + 1;
...@@ -792,6 +798,11 @@ public class IntersceneLma {
}
}
}
} else if (mb_mode) {
for (int i = 0; i < par_indices.length; i++) {
jt[i][2 * iMTile + 0] = Double.NaN; // pX
jt[i][2 * iMTile + 1] = Double.NaN; // ; // pY (disparity is not used)
}
}
}
}
...@@ -799,6 +810,9 @@ public class IntersceneLma {
};
}
ImageDtt.startAndJoin(threads);
if (mb_mode) {
return null;
}
// pull to the initial parameter values
for (int i = 0; i < par_indices.length; i++) {
fx [i + 2 * macrotile_centers.length] = vector[i]; // - parameters_initial[i]; // scale will be combined with weights
......
...@@ -236,7 +236,12 @@ public class IntersceneMatchParameters {
// Boost amount
public double eq_weight_add = 0.03; // calculate from min-strengths
public double eq_weight_scale = 20.0; // maximal boost ratio
public double eq_level = 0.9; // equalization level (0.0 - leave as is, 1.0 - boost to have the same supertile strength as average)
public boolean mb_en = true;
public double mb_tau = 0.008; // time constant, sec
public double mb_max_gain = 5.0; // motion blur maximal gain (if more - move second point more than a pixel
public boolean stereo_merge = true;
public int stereo_gap = 32; // pixels between right and left frames
...@@ -710,8 +715,15 @@ public class IntersceneMatchParameters {
gd.addNumericField("Equalization level", this.eq_level, 5,7,"",
"Target supertile strength will be set to: 0 - original strength (no modification), 1.0 - average supertile strength.");
gd.addTab("Stereo/Video","Parameters for stereo video generation");
gd.addTab("MB","Motion Blur");
gd.addCheckbox ("Compensate motion blur", this.mb_en,
"Enable motion blur correction.");
gd.addNumericField("Sensor time constant", this.mb_tau, 5,7,"s",
"Sensor exponential decay in seconds.");
gd.addNumericField("Maximal gain", this.mb_max_gain, 5,7,"x",
"Maximal gain for motion blur correction (if needed more for 1 pixel, increase offset).");
gd.addTab("Stereo/Video","Parameters for stereo video generation");
gd.addMessage ("Stereo");
if (stereo_views.length > 0) {
String [] stereo_choices = new String [stereo_views.length + 1];
...@@ -987,9 +999,11 @@ public class IntersceneMatchParameters {
this.eq_max_disparity = gd.getNextNumber();
this.eq_weight_add = gd.getNextNumber();
this.eq_weight_scale = gd.getNextNumber();
this.eq_level = gd.getNextNumber();
this.mb_en = gd.getNextBoolean();
this.mb_tau = gd.getNextNumber();
this.mb_max_gain = gd.getNextNumber();
if (stereo_views.length > 0) {
int i = gd.getNextChoiceIndex();
...@@ -1260,10 +1274,14 @@ public class IntersceneMatchParameters {
properties.setProperty(prefix+"eq_weight_scale", this.eq_weight_scale+""); // double
properties.setProperty(prefix+"eq_level", this.eq_level+""); // double
properties.setProperty(prefix+"stereo_merge", this.stereo_merge+""); // boolean
properties.setProperty(prefix+"stereo_gap", this.stereo_gap+""); // int
properties.setProperty(prefix+"stereo_intereye", this.stereo_intereye+""); // double
properties.setProperty(prefix+"mb_en", this.mb_en+""); // boolean
properties.setProperty(prefix+"mb_tau", this.mb_tau+""); // double
properties.setProperty(prefix+"mb_max_gain", this.mb_max_gain+""); // double
properties.setProperty(prefix+"stereo_phone_width", this.stereo_phone_width+""); // double
properties.setProperty(prefix+"stereo_merge", this.stereo_merge+""); // boolean
properties.setProperty(prefix+"stereo_gap", this.stereo_gap+""); // int
properties.setProperty(prefix+"stereo_intereye", this.stereo_intereye+""); // double
properties.setProperty(prefix+"stereo_phone_width", this.stereo_phone_width+""); // double
properties.setProperty(prefix+"extra_hor_tile", this.extra_hor_tile+""); // int
properties.setProperty(prefix+"extra_vert_tile", this.extra_vert_tile+""); // int
properties.setProperty(prefix+"crop_3d", this.crop_3d+""); // boolean
...@@ -1484,6 +1502,10 @@ public class IntersceneMatchParameters {
if (properties.getProperty(prefix+"eq_weight_scale")!=null) this.eq_weight_scale=Double.parseDouble(properties.getProperty(prefix+"eq_weight_scale"));
if (properties.getProperty(prefix+"eq_level")!=null) this.eq_level=Double.parseDouble(properties.getProperty(prefix+"eq_level"));
if (properties.getProperty(prefix+"mb_en")!=null) this.mb_en=Boolean.parseBoolean(properties.getProperty(prefix+"mb_en"));
if (properties.getProperty(prefix+"mb_tau")!=null) this.mb_tau=Double.parseDouble(properties.getProperty(prefix+"mb_tau"));
if (properties.getProperty(prefix+"mb_max_gain")!=null) this.mb_max_gain=Double.parseDouble(properties.getProperty(prefix+"mb_max_gain"));
if (properties.getProperty(prefix+"stereo_merge")!=null) this.stereo_merge=Boolean.parseBoolean(properties.getProperty(prefix+"stereo_merge"));
if (properties.getProperty(prefix+"stereo_gap")!=null) this.stereo_gap=Integer.parseInt(properties.getProperty(prefix+"stereo_gap"));
if (properties.getProperty(prefix+"stereo_intereye")!=null) this.stereo_intereye=Double.parseDouble(properties.getProperty(prefix+"stereo_intereye"));
...@@ -1724,10 +1746,14 @@ public class IntersceneMatchParameters {
imp.eq_weight_scale = this.eq_weight_scale;
imp.eq_level = this.eq_level;
imp.stereo_merge = this.stereo_merge;
imp.stereo_gap = this.stereo_gap;
imp.stereo_intereye = this. stereo_intereye;
imp.mb_en = this.mb_en;
imp.mb_tau = this.mb_tau;
imp.mb_max_gain = this.mb_max_gain;
imp.stereo_phone_width = this. stereo_phone_width;
imp.stereo_merge = this.stereo_merge;
imp.stereo_gap = this.stereo_gap;
imp.stereo_intereye = this. stereo_intereye;
imp.stereo_phone_width = this. stereo_phone_width;
imp.extra_hor_tile = this.extra_hor_tile;
imp.extra_vert_tile = this.extra_vert_tile;
......
...@@ -4557,7 +4557,6 @@ public class OpticalFlow {
((quadCLTs[ref_index].getNumAccum() < quadCLTs[ref_index].getNumOrient())||
(quadCLTs[ref_index].getNumOrient() >= min_num_orient))) {
// should skip scenes w/o orientation 06/29/2022
combo_dsn_final = intersceneExport( // result indexed by COMBO_DSN_TITLES, COMBO_DSN_INDX_***
clt_parameters, // CLTParameters clt_parameters,
ers_reference, // ErsCorrection ers_reference,
...@@ -4565,8 +4564,6 @@ public class OpticalFlow {
colorProcParameters, // ColorProcParameters colorProcParameters,
debugLevel); // int debug_level
quadCLTs[ref_index].inc_accum();
// save with updated num_accum // save with updated num_accum
quadCLTs[ref_index].saveInterProperties( // save properties for interscene processing (extrinsics, ers, ...)
...@@ -4959,7 +4956,6 @@ public class OpticalFlow {
ImagePlus imp_video = imp_scenes_pair[nstereo];
boolean [] combine_modes = {!combine_left_right, stereo_merge && combine_left_right, anaglyth_en && !toRGB && combine_left_right };
for (int istereo_mode = 0; istereo_mode < combine_modes.length; istereo_mode++) if(combine_modes[istereo_mode]) {
// if (combine_left_right) { // combine pairs multi-threaded
if (istereo_mode == 1) { // combine pairs for "Google" VR
final int left_width = imp_scenes_pair[0].getWidth();
final int right_width = imp_scenes_pair[1].getWidth();
...@@ -5007,17 +5003,8 @@ public class OpticalFlow {
imp_video.setStack(stereo_stack);
String title = imp_scenes_pair[1].getTitle();
imp_video.setTitle(title.replace("-RIGHT","-STEREO"));
// convert stereo_stack to imp_scenes_pair[1], keeping calibration and fps?
/// imp_scenes_pair[1].setStack(stereo_stack);
/// String title = imp_scenes_pair[1].getTitle();
/// imp_video = new ImagePlus(
/// imp_scenes_pair[1].getTitle().replace("-RIGHT","-STEREO"),
/// stereo_stack);
/// imp_scenes_pair[1].setTitle(title.replace("-RIGHT","-STEREO"));
} else if (istereo_mode == 2) { // combine anaglyph
// final Color anaglyph_left = clt_parameters.imp.anaglyph_left;
// final Color anaglyph_right = clt_parameters.imp.anaglyph_right;
final double [] left_rgb= {
anaglyph_left.getRed()/255.0,
anaglyph_left.getGreen()/255.0,
...@@ -5066,8 +5053,6 @@ public class OpticalFlow {
imp_video.setStack(stereo_stack);
String title = imp_scenes_pair[1].getTitle();
imp_video.setTitle(title.replace("-RIGHT","-ANAGLYPH"));
/// String title = imp_scenes_pair[1].getTitle();
/// imp_scenes_pair[1].setTitle(title.replace("-RIGHT","-ANAGLYPH"));
} // if (istereo_mode == 1) {if (combine_left_right) { // combine pairs multi-threaded
String avi_path=null;
video:
...@@ -5090,7 +5075,6 @@ public class OpticalFlow {
if (avi_path == null) {
break video;
}
// int img_width=imp_scenes_pair[nstereo].getWidth();
int img_width=imp_video.getWidth();
int stereo_width = combine_left_right? img_width:0;
stereo_widths_list.add(stereo_width);
...@@ -5202,6 +5186,7 @@ public class OpticalFlow {
null, // testr, // null, // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
clt_parameters, // CLTParameters clt_parameters,
constant_disparity, // double [] disparity_ref,
ZERO3, // final double [] scene_xyz, // camera center in world coordinates
ZERO3, // new double[] {.1,0.1,.1}, // ZERO3, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[ref_index], // final QuadCLT scene,
...@@ -13404,7 +13389,13 @@ public double[][] correlateIntersceneDebug( // only uses GPU and quad
boolean [] reliable_ref, // null or bitmask of reliable reference tiles
QuadCLT [] quadCLTs,
int debugLevel)
{
boolean test_motion_blur = true;
boolean mb_en = clt_parameters.imp.mb_en;
double mb_tau = clt_parameters.imp.mb_tau; // 0.008; // time constant, sec
double mb_max_gain = clt_parameters.imp.mb_max_gain; // 5.0; // motion blur maximal gain (if more - move second point more than a pixel
int earliest_scene = 0;
boolean use_combo_dsi = clt_parameters.imp.use_combo_dsi;
boolean use_lma_dsi = clt_parameters.imp.use_lma_dsi;
...@@ -13440,13 +13431,33 @@ public double[][] correlateIntersceneDebug( // only uses GPU and quad
}
}
}
double [][] ref_pXpYD = null;
double [][] dbg_mb_img = null;
double [] mb_ref_disparity =null;
if (test_motion_blur) {
mb_ref_disparity = interscene_ref_disparity;
if (mb_ref_disparity == null) {
mb_ref_disparity = quadCLTs[ref_index].getDLS()[use_lma_dsi?1:0];
}
ref_pXpYD = transformToScenePxPyD( // full size - [tilesX*tilesY], some nulls
null, // final Rectangle [] extra_woi, // show larger than sensor WOI (or null)
mb_ref_disparity, // dls[0], // final double [] disparity_ref, // invalid tiles - NaN in disparity (maybe it should not be masked by margins?)
ZERO3, // final double [] scene_xyz, // camera center in world coordinates
ZERO3, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[ref_index], // final QuadCLT scene_QuadClt,
quadCLTs[ref_index]); // final QuadCLT reference_QuadClt)
dbg_mb_img = new double[quadCLTs.length][];
}
ErsCorrection ers_reference = quadCLTs[ref_index].getErsCorrection();
double [][][] dxyzatr_dt = new double[quadCLTs.length][][];
double [][][] scenes_xyzatr = new double [quadCLTs.length][][]; // previous scene relative to the next one
scenes_xyzatr[ref_index] = new double[2][3]; // all zeros
// should have at least next or previous non-null
int debug_scene = -15;
double maximal_series_rms = 0.00;
for (int nscene = ref_index; nscene >= earliest_scene; nscene--) {
if ((quadCLTs[nscene] == null) ||
((nscene != ref_index) &&
...@@ -13468,7 +13479,7 @@ public double[][] correlateIntersceneDebug( // only uses GPU and quad
nscene1 = nscene;
}
if (nscene1 == nscene0) {
System.out.println("**** Isoloated scene!!! skippiung... now may only happen for a ref_scene****");
System.out.println("**** Isolated scene!!! skipping... now may only happen for a ref_scene****");
earliest_scene = nscene + 1;
break;
}
...@@ -13487,15 +13498,88 @@ public double[][] correlateIntersceneDebug( // only uses GPU and quad
double [] scene_atr1 = (nscene1== ref_index)? ZERO3:ers_reference.getSceneATR(ts1);
dxyzatr_dt[nscene] = new double[2][3];
for (int i = 0; i < 3; i++) {
dxyzatr_dt[nscene][0][i] = 0.0; // (scene_xyz1[i]-scene_xyz0[i])/dt;
dxyzatr_dt[nscene][0][i] = (scene_xyz1[i]-scene_xyz0[i])/dt;
dxyzatr_dt[nscene][1][i] = (scene_atr1[i]-scene_atr0[i])/dt;
}
double [] scene_xyz_pre = ZERO3;
double [] scene_atr_pre = ZERO3;
quadCLTs[nscene].getErsCorrection().setErsDt( // set for ref also (should be set before non-ref!)
dxyzatr_dt[nscene][0], // double [] ers_xyz_dt,
ZERO3, //, // dxyzatr_dt[nscene][0], // double [] ers_xyz_dt,
dxyzatr_dt[nscene][1]); // double [] ers_atr_dt)(ers_scene_original_xyz_dt);
int debug_scene = -15;
if (dbg_mb_img != null) {
dbg_mb_img[nscene] = new double [tilesX*tilesY*2];
Arrays.fill(dbg_mb_img[nscene],Double.NaN);
double [] mb_scene_xyz = (nscene != ref_index)? ers_reference.getSceneXYZ(ts):ZERO3;
double [] mb_scene_atr = (nscene != ref_index)? ers_reference.getSceneATR(ts):ZERO3;
double [][] motion_blur = getMotionBlur(
quadCLTs[ref_index], // QuadCLT ref_scene,
quadCLTs[nscene], // QuadCLT scene, // can be the same as ref_scene
ref_pXpYD, // double [][] ref_pXpYD, // here it is scene, not reference!
mb_scene_xyz, // double [] camera_xyz,
mb_scene_atr, // double [] camera_atr,
dxyzatr_dt[nscene][0], // double [] camera_xyz_dt,
dxyzatr_dt[nscene][1], // double [] camera_atr_dt,
debugLevel); // int debug_level)
for (int nTile = 0; nTile < motion_blur.length; nTile++) if (motion_blur[nTile] != null) {
int tx = nTile % tilesX;
int ty = nTile / tilesX;
dbg_mb_img[nscene][tx + tilesX * (ty*2 +0)] = motion_blur[nTile][0];
dbg_mb_img[nscene][tx + tilesX * (ty*2 +1)] = motion_blur[nTile][1];
}
boolean show_corrected = false;
if (nscene == debug_scene) {
System.out.println("nscene = "+nscene);
System.out.println("nscene = "+nscene);
}
while (show_corrected) {
ImagePlus imp_mbc = QuadCLT.renderGPUFromDSI(
-1, // final int sensor_mask,
false, // final boolean merge_channels,
null, // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
clt_parameters, // CLTParameters clt_parameters,
mb_ref_disparity, // double [] disparity_ref,
// motion blur compensation
mb_tau, // double mb_tau, // 0.008; // time constant, sec
mb_max_gain, // double mb_max_gain, // 5.0; // motion blur maximal gain (if more - move second point more than a pixel
motion_blur, // double [][] mb_vectors, //
mb_scene_xyz, // ZERO3, // final double [] scene_xyz, // camera center in world coordinates
mb_scene_atr, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[nscene], // final QuadCLT scene,
quadCLTs[ref_index], // final QuadCLT ref_scene, // now - may be null - for testing if scene is rotated ref
false, // toRGB, // final boolean toRGB,
clt_parameters.imp.show_color_nan,
quadCLTs[nscene].getImageName()+"-MOTION_BLUR_CORRECTED", // String suffix,
threadsMax, // int threadsMax,
debugLevel); // int debugLevel)
imp_mbc.show();
ImagePlus imp_mbc_merged = QuadCLT.renderGPUFromDSI(
-1, // final int sensor_mask,
true, // final boolean merge_channels,
null, // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
clt_parameters, // CLTParameters clt_parameters,
mb_ref_disparity, // double [] disparity_ref,
// motion blur compensation
mb_tau, // double mb_tau, // 0.008; // time constant, sec
mb_max_gain, // double mb_max_gain, // 5.0; // motion blur maximal gain (if more - move second point more than a pixel
motion_blur, // double [][] mb_vectors, //
mb_scene_xyz, // ZERO3, // final double [] scene_xyz, // camera center in world coordinates
mb_scene_atr, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[nscene], // final QuadCLT scene,
quadCLTs[ref_index], // final QuadCLT ref_scene, // now - may be null - for testing if scene is rotated ref
false, // toRGB, // final boolean toRGB,
clt_parameters.imp.show_color_nan,
quadCLTs[nscene].getImageName()+"-MOTION_BLUR_CORRECTED", // String suffix,
threadsMax, // int threadsMax,
debugLevel); // int debugLevel)
imp_mbc_merged.show();
}
}
// int debug_scene = -15;
if (nscene != ref_index) {
if (nscene == debug_scene) {
System.out.println("nscene = "+nscene);
...@@ -13550,7 +13634,20 @@ public double[][] correlateIntersceneDebug( // only uses GPU and quad
}
}
} // for (int nscene = ref_index; nscene > earliest_scene; nscene--) {
if (dbg_mb_img != null) {
String [] dbg_mb_titles = new String[quadCLTs.length];
for (int i = 0; i < quadCLTs.length; i++) if (quadCLTs[i] != null) {
dbg_mb_titles[i] = quadCLTs[i].getImageName();
}
(new ShowDoubleFloatArrays()).showArrays(
dbg_mb_img,
tilesX * 2,
tilesY,
true,
quadCLTs[ref_index].getImageName()+"-MOTION_BLUR",
dbg_mb_titles);
}
if (debugLevel > -4) {
System.out.println("All multi scene passes are Done. Maximal RMSE was "+maximal_series_rms);
}
...@@ -13561,6 +13658,84 @@ public double[][] correlateIntersceneDebug( // only uses GPU and quad
return earliest_scene;
}
/**
* Get per-tile motion blur vector
* @param ref_scene reference scene
* @param scene current scene (may be the same as reference)
* @param ref_pXpYD per-tile pX, pY, disparity for reference scene (some may be nulls)
* @param camera_xyz camera x,y,z relative to the reference
* @param camera_atr camera azimuth, tilt, roll relative to the reference
* @param camera_xyz_dt camera linear velocities: x', y', z'
* @param camera_atr_dt camera angular velocities: azimuth', tilt', roll'
* @param debug_level debug level
* @return per-tile array of {dx/dt, dy/dt} vectors, some may be null
*/
public double [][] getMotionBlur(
QuadCLT ref_scene,
QuadCLT scene, // can be the same as ref_scene
double [][] ref_pXpYD,
double [] camera_xyz,
double [] camera_atr,
double [] camera_xyz_dt,
double [] camera_atr_dt,
// boolean fill_gaps,
int debug_level)
{
boolean[] param_select = new boolean[ErsCorrection.DP_NUM_PARS];
final int [] par_indices = new int[] {
ErsCorrection.DP_DSAZ,
ErsCorrection.DP_DSTL,
ErsCorrection.DP_DSRL,
ErsCorrection.DP_DSX,
ErsCorrection.DP_DSY,
ErsCorrection.DP_DSZ};
for (int i: par_indices) {
param_select[i]=true;
}
final double [] camera_dt = new double[] {
camera_atr_dt[0], camera_atr_dt[1], camera_atr_dt[2],
camera_xyz_dt[0], camera_xyz_dt[1], camera_xyz_dt[2]};
final double [][] mb_vectors = new double [ref_pXpYD.length][];
IntersceneLma intersceneLma = new IntersceneLma(
this, // OpticalFlow opticalFlow
false); // clt_parameters.ilp.ilma_thread_invariant);
intersceneLma.prepareLMA(
camera_xyz, // final double [] scene_xyz0, // camera center in world coordinates (or null to use instance)
camera_atr, // final double [] scene_atr0, // camera orientation relative to world frame (or null to use instance)
scene, // final QuadCLT scene_QuadClt,
ref_scene, // final QuadCLT reference_QuadClt,
param_select, // final boolean[] param_select,
null, // final double [] param_regweights,
null, // final double [][] vector_XYS, // optical flow X,Y, confidence obtained from the correlate2DIterate()
ref_pXpYD, // final double [][] centers, // macrotile centers (in pixels and average disparities
false, // boolean first_run,
debug_level); // final int debug_level)
final double [][] last_jt = intersceneLma. getLastJT(); // alternating x,y for each selected parameters
final Thread[] threads = ImageDtt.newThreadArray(threadsMax);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nTile = ai.getAndIncrement(); nTile < ref_pXpYD.length; nTile = ai.getAndIncrement()) if (ref_pXpYD[nTile] != null){
mb_vectors[nTile]= new double[2];
for (int i = 0; i < par_indices.length; i++) {
mb_vectors[nTile][0] += camera_dt[i] * last_jt[i][2*nTile + 0];
mb_vectors[nTile][1] += camera_dt[i] * last_jt[i][2*nTile + 1];
}
if (Double.isNaN(mb_vectors[nTile][0]) || Double.isNaN(mb_vectors[nTile][1])) {
mb_vectors[nTile] = null;
}
}
}
};
}
ImageDtt.startAndJoin(threads);
return mb_vectors;
}
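getMotionBlur() above turns the camera velocities into per-tile image-plane velocities by the chain rule: last_jt holds the Jacobian of pX, pY with respect to the six selected ERS parameters (azimuth, tilt, roll, X, Y, Z) and camera_dt holds their time derivatives, so for each tile the accumulation loop computes

\frac{dpX}{dt} = \sum_{k=1}^{6} \frac{\partial pX}{\partial p_k}\,\dot p_k, \qquad
\frac{dpY}{dt} = \sum_{k=1}^{6} \frac{\partial pY}{\partial p_k}\,\dot p_k

in pixels per second; setInterTasksMotionBlur() later multiplies these by mb_tau to get the blur length in pixels.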
public double[][] adjustPairsLMAInterscene(
CLTParameters clt_parameters,
QuadCLT reference_QuadClt,
...@@ -13614,84 +13789,6 @@ public double[][] correlateIntersceneDebug( // only uses GPU and quad
System.out.println("adjustPairsLMAInterscene() returned null");
return null;
}
/*
int eq_stride_hor = 8;
int eq_stride_vert = 8;
double eq_min_stile_weight = 0.2; // 1.0;
int eq_min_stile_number = 10;
double eq_min_stile_fraction = 0.02; // 0.05;
double eq_min_disparity = 5;
double eq_max_disparity = 100;
double eq_weight_add = 0.1;
double eq_weight_scale = 10;
double eq_level = 0.8; // equalize to (log) fraction of average/this strength
if (run_equalize && near_important) {
TileProcessor tp = reference_QuadClt.getTileProcessor();
int tilesX = tp.getTilesX();
int tilesY = tp.getTilesY();
// backup coord_motion[1][][2] // strength
double [] strength_backup = null;
if (debug_equalize) {
strength_backup = new double [coord_motion[1].length];
for (int i = 0; i < strength_backup.length; i++) if (coord_motion[1][i] != null) {
strength_backup[i] = coord_motion[1][i][2];
}
}
while (run_equalize) {
// restore
if (strength_backup != null) {
for (int i = 0; i < strength_backup.length; i++) if (coord_motion[1][i] != null) {
coord_motion[1][i][2] = strength_backup[i];
}
}
equalizeMotionVectorsWeights(
coord_motion, // final double [][][] coord_motion,
tilesX, // final int tilesX,
eq_stride_hor, // final int stride_hor,
eq_stride_vert, // final int stride_vert,
eq_min_stile_weight, // final double min_stile_weight,
eq_min_stile_number, // final int min_stile_number,
eq_min_stile_fraction, // final double min_stile_fraction,
eq_min_disparity, // final double min_disparity,
eq_max_disparity, // final double max_disparity,
eq_weight_add, // final double weight_add,
eq_weight_scale, // final double weight_scale)
eq_level); // equalize to (log) fraction of average/this strength
if (!debug_equalize) {
break;
}
String [] mvTitles = {"dx", "dy","conf", "conf0", "pX", "pY","Disp","defined"}; // ,"blurX","blurY", "blur"};
double [][] dbg_img = new double [mvTitles.length][tilesX*tilesY];
for (int l = 0; l < dbg_img.length; l++) {
Arrays.fill(dbg_img[l], Double.NaN);
}
for (int nTile = 0; nTile < coord_motion[0].length; nTile++) {
if (coord_motion[0][nTile] != null) {
for (int i = 0; i <3; i++) {
dbg_img[4+i][nTile] = coord_motion[0][nTile][i];
}
}
dbg_img[3] = strength_backup;
if (coord_motion[1][nTile] != null) {
for (int i = 0; i <3; i++) {
dbg_img[0+i][nTile] = coord_motion[1][nTile][i];
}
}
dbg_img[7][nTile] = ((coord_motion[0][nTile] != null)?1:0)+((coord_motion[0][nTile] != null)?2:0);
}
(new ShowDoubleFloatArrays()).showArrays( // out of boundary 15
dbg_img,
tilesX,
tilesY,
true,
scene_QuadClt.getImageName()+"-"+reference_QuadClt.getImageName()+"-coord_motion-eq",
mvTitles);
}
}
*/
intersceneLma.prepareLMA(
camera_xyz0, // final double [] scene_xyz0, // camera center in world coordinates (or null to use instance)
......
...@@ -2660,6 +2660,42 @@ public class QuadCLT extends QuadCLTCPU {
}
public static ImagePlus renderGPUFromDSI(
final int sensor_mask,
final boolean merge_channels,
final Rectangle full_woi_in, // show larger than sensor WOI in tiles (or null)
CLTParameters clt_parameters,
double [] disparity_ref,
final double [] scene_xyz, // camera center in world coordinates
final double [] scene_atr, // camera orientation relative to world frame
final QuadCLT scene,
final QuadCLT ref_scene, // now - may be null - for testing if scene is rotated ref
final boolean toRGB,
final boolean show_nan,
String suffix,
int threadsMax,
final int debugLevel){
return renderGPUFromDSI(
sensor_mask,
merge_channels,
full_woi_in, // show larger than sensor WOI in tiles (or null)
clt_parameters,
disparity_ref,
// motion blur compensation
0.0, // double mb_tau, // 0.008; // time constant, sec
0.0, // mb_max_gain, // 5.0; // motion blur maximal gain (if more - move second point more than a pixel
null, // double [][] mb_vectors, //
scene_xyz, // camera center in world coordinates
scene_atr, // camera orientation relative to world frame
scene,
ref_scene, // now - may be null - for testing if scene is rotated ref
toRGB,
show_nan,
suffix,
threadsMax,
debugLevel);
}
public static ImagePlus renderGPUFromDSI(
final int sensor_mask,
...@@ -2667,8 +2703,11 @@ public class QuadCLT extends QuadCLTCPU {
final Rectangle full_woi_in, // show larger than sensor WOI in tiles (or null)
CLTParameters clt_parameters,
double [] disparity_ref,
// not used, just as null/not null now. All offsets are already in scene_xyz, scene_atr (including ref)
// double [] stereo_offset, // offset reference camera {x,y,z} or null
// motion blur compensation
double mb_tau, // 0.008; // time constant, sec
double mb_max_gain, // 5.0; // motion blur maximal gain (if more - move second point more than a pixel
double [][] mb_vectors, //
final double [] scene_xyz, // camera center in world coordinates
final double [] scene_atr, // camera orientation relative to world frame
final QuadCLT scene,
...@@ -2678,7 +2717,6 @@ public class QuadCLT extends QuadCLTCPU {
String suffix,
int threadsMax,
final int debugLevel){
// boolean show_nan = toRGB? clt_parameters.imp.show_color_nan : clt_parameters.imp.show_mono_nan;
double [][] pXpYD =OpticalFlow.transformToScenePxPyD( // now should work with offset ref_scene
full_woi_in, // final Rectangle [] extra_woi, // show larger than sensor WOI (or null)
disparity_ref, // final double [] disparity_ref, // invalid tiles - NaN in disparity
...@@ -2695,35 +2733,62 @@ public class QuadCLT extends QuadCLTCPU {
if (showPxPyD) {
int dbg_width = rendered_width/GPUTileProcessor.DTT_SIZE;
int dbg_height = pXpYD.length/dbg_width;
double [][] dbg_img = new double [3][pXpYD.length];
double [][] dbg_img = new double [3 + ((mb_vectors!=null)? 2:0)][pXpYD.length];
String [] dbg_titles = (mb_vectors!=null)?
(new String[] {"pX","pY","Disparity","mb_X","mb_Y"}):
(new String[] {"pX","pY","Disparity"});
for (int i = 0; i < dbg_img.length; i++) {
Arrays.fill(dbg_img[i], Double.NaN);
}
for (int nTile = 0; nTile < pXpYD.length; nTile++) if (pXpYD[nTile] != null){
for (int i = 0; i < dbg_img.length; i++) {
for (int i = 0; i < pXpYD[nTile].length; i++) {
dbg_img[i][nTile] = pXpYD[nTile][i];
}
if ((mb_vectors != null) && (mb_vectors[nTile]!=null)) { // guard: mb_vectors may be null here
for (int i = 0; i <2; i++) {
dbg_img[3 + i][nTile] = mb_tau * mb_vectors[nTile][i];
}
}
}
(new ShowDoubleFloatArrays()).showArrays( // out of boundary 15
dbg_img,
dbg_width,
dbg_height,
true,
"pXpYD",
new String[] {"pX","pY","Disparity"});
scene.getImageName()+"-pXpYD",
dbg_titles);
}
TpTask[][] tp_tasks;
if (mb_vectors!=null) {
tp_tasks = GpuQuad.setInterTasksMotionBlur( // "true" reference, with stereo actual reference will be offset
scene.getNumSensors(),
rendered_width, // should match output size, pXpYD.length
!scene.hasGPU(), // final boolean calcPortsCoordinatesAndDerivatives, // GPU can calculate them centreXY
pXpYD, // final double [][] pXpYD, // per-tile array of pX,pY,disparity triplets (or nulls)
null, // final boolean [] selection, // may be null, if not null do not process unselected tiles
// motion blur compensation
mb_tau, // final double mb_tau, // 0.008; // time constant, sec
mb_max_gain, // final double mb_max_gain, // 5.0; // motion blur maximal gain (if more - move second point more than a pixel
mb_vectors, //final double [][] mb_vectors, //
scene.getErsCorrection(), // final GeometryCorrection geometryCorrection,
0.0, // final double disparity_corr,
-1, // 0, // margin, // final int margin, // do not use tiles if their centers are closer to the edges
null, // final boolean [] valid_tiles,
threadsMax); // final int threadsMax) // maximal number of threads to launch
} else {
tp_tasks = new TpTask[1][];
tp_tasks[0] = GpuQuad.setInterTasks( // "true" reference, with stereo actual reference will be offset
scene.getNumSensors(),
rendered_width, // should match output size, pXpYD.length
!scene.hasGPU(), // final boolean calcPortsCoordinatesAndDerivatives, // GPU can calculate them centreXY
pXpYD, // final double [][] pXpYD, // per-tile array of pX,pY,disparity triplets (or nulls)
null, // final boolean [] selection, // may be null, if not null do not process unselected tiles
scene.getErsCorrection(), // final GeometryCorrection geometryCorrection,
0.0, // final double disparity_corr,
-1, // 0, // margin, // final int margin, // do not use tiles if their centers are closer to the edges
null, // final boolean [] valid_tiles,
threadsMax); // final int threadsMax) // maximal number of threads to launch
} }
//scene_QuadClt.getTileProcessor().getTileSize();
TpTask[] tp_tasks_ref = GpuQuad.setInterTasks( // "true" reference, with stereo actual reference will be offset
scene.getNumSensors(),
rendered_width, // should match output size, pXpYD.length
!scene.hasGPU(), // final boolean calcPortsCoordinatesAndDerivatives, // GPU can calculate them centreXY
pXpYD, // final double [][] pXpYD, // per-tile array of pX,pY,disparity triplets (or nulls)
null, // final boolean [] selection, // may be null, if not null do not process unselected tiles
scene.getErsCorrection(), // final GeometryCorrection geometryCorrection,
0.0, // final double disparity_corr,
-1, // 0, // margin, // final int margin, // do not use tiles if their centers are closer to the edges
null, // final boolean [] valid_tiles,
threadsMax); // final int threadsMax) // maximal number of threads to launch
scene.saveQuadClt(); // to re-load new set of Bayer images to the GPU (do nothing for CPU) and Geometry
ImageDtt image_dtt = new ImageDtt(
scene.getNumSensors(),
...@@ -2738,20 +2803,35 @@ public class QuadCLT extends QuadCLTCPU {
int [] wh = (full_woi_in == null)? null: new int[]{
full_woi_in.width * GPUTileProcessor.DTT_SIZE,
full_woi_in.height * GPUTileProcessor.DTT_SIZE};
// boolean toRGB = true; // does not work here, define in ColorProcParameters
int erase_clt = show_nan ? 1:0;
image_dtt.setReferenceTD( // change to main?
erase_clt, //final int erase_clt,
wh, // null, // final int [] wh, // null (use sensor dimensions) or pair {width, height} in pixels
clt_parameters.img_dtt, // final ImageDttParameters imgdtt_params, // Now just extra correlation parameters, later will include, most others
use_reference, // true, // final boolean use_reference_buffer,
tp_tasks_ref, // final TpTask[] tp_tasks,
clt_parameters.gpu_sigma_r, // final double gpu_sigma_r, // 0.9, 1.1
clt_parameters.gpu_sigma_b, // final double gpu_sigma_b, // 0.9, 1.1
clt_parameters.gpu_sigma_g, // final double gpu_sigma_g, // 0.6, 0.7
clt_parameters.gpu_sigma_m, // final double gpu_sigma_m, // = 0.4; // 0.7;
threadsMax, // final int threadsMax, // maximal number of threads to launch
debugLevel); // final int globalDebugLevel);
boolean test1 = true;
if ((mb_vectors!=null) && test1) {
image_dtt.setReferenceTDMotionBlur( // change to main?
erase_clt, //final int erase_clt,
wh, // null, // final int [] wh, // null (use sensor dimensions) or pair {width, height} in pixels
clt_parameters.img_dtt, // final ImageDttParameters imgdtt_params, // Now just extra correlation parameters, later will include, most others
use_reference, // true, // final boolean use_reference_buffer,
tp_tasks, // final TpTask[] tp_tasks,
clt_parameters.gpu_sigma_r, // final double gpu_sigma_r, // 0.9, 1.1
clt_parameters.gpu_sigma_b, // final double gpu_sigma_b, // 0.9, 1.1
clt_parameters.gpu_sigma_g, // final double gpu_sigma_g, // 0.6, 0.7
clt_parameters.gpu_sigma_m, // final double gpu_sigma_m, // = 0.4; // 0.7;
threadsMax, // final int threadsMax, // maximal number of threads to launch
debugLevel); // final int globalDebugLevel);
} else {
image_dtt.setReferenceTD( // change to main?
erase_clt, //final int erase_clt,
wh, // null, // final int [] wh, // null (use sensor dimensions) or pair {width, height} in pixels
clt_parameters.img_dtt, // final ImageDttParameters imgdtt_params, // Now just extra correlation parameters, later will include, most others
use_reference, // true, // final boolean use_reference_buffer,
tp_tasks[0], // final TpTask[] tp_tasks,
clt_parameters.gpu_sigma_r, // final double gpu_sigma_r, // 0.9, 1.1
clt_parameters.gpu_sigma_b, // final double gpu_sigma_b, // 0.9, 1.1
clt_parameters.gpu_sigma_g, // final double gpu_sigma_g, // 0.6, 0.7
clt_parameters.gpu_sigma_m, // final double gpu_sigma_m, // = 0.4; // 0.7;
threadsMax, // final int threadsMax, // maximal number of threads to launch
debugLevel); // final int globalDebugLevel);
}
ImagePlus imp_render = scene.renderFromTD (
sensor_mask, // final int sensor_mask,
merge_channels, // boolean merge_channels,
......