Commit 3494b1d7 authored by Andrey Filippov's avatar Andrey Filippov

Debugged rendering sequence for center virtual view

parent 844fe76d
......@@ -169,12 +169,9 @@ import ij.process.ImageProcessor;
int num_slices = pixels[0].length;
double [][] dpixels = new double [num_frames*num_slices][];
// System.out.println("pixels.length="+pixels.length+" pixels[0].length="+pixels[0].length);
for (int f = 0; f < num_frames; f++) {
// System.out.println("f="+f);
for (int s = 0; s < num_slices; s ++) {
int indx = s + f * num_slices;
// System.out.println("f="+f+" s="+s+" indx="+indx);
dpixels[indx] = pixels[f][s];
}
}
......
......@@ -2019,6 +2019,7 @@ public class Interscene {
null, // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
clt_parameters, // CLTParameters clt_parameters,
mb_ref_disparity, // double [] disparity_ref,
null, // double [][] ref_pXpYD, // alternative to disparity_ref when reference is not uniform
// motion blur compensation
mb_tau, // double mb_tau, // 0.008; // time constant, sec
mb_max_gain, // double mb_max_gain, // 5.0; // motion blur maximal gain (if more - move second point more than a pixel
......@@ -2040,6 +2041,7 @@ public class Interscene {
null, // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
clt_parameters, // CLTParameters clt_parameters,
mb_ref_disparity, // double [] disparity_ref,
null, // double [][] ref_pXpYD, // alternative to disparity_ref when reference is not uniform
// motion blur compensation
mb_tau, // double mb_tau, // 0.008; // time constant, sec
mb_max_gain, // double mb_max_gain, // 5.0; // motion blur maximal gain (if more - move second point more than a pixel
......
......@@ -51,6 +51,7 @@ import com.elphel.imagej.cameras.CLTParameters;
import com.elphel.imagej.cameras.ColorProcParameters;
import com.elphel.imagej.cameras.EyesisCorrectionParameters;
import com.elphel.imagej.common.DoubleGaussianBlur;
import com.elphel.imagej.common.PolynomialApproximation;
import com.elphel.imagej.common.ShowDoubleFloatArrays;
import com.elphel.imagej.correction.CorrectionColorProc;
import com.elphel.imagej.cuas.CuasCenterLma;
......@@ -2865,8 +2866,6 @@ public class OpticalFlow {
(scene_xyz[0]==0.0) && (scene_xyz[1]==0.0) && (scene_xyz[2]==0.0) && // java.lang.NullPointerException
(scene_atr[0]==0.0) && (scene_atr[1]==0.0) && (scene_atr[2]==0.0);
final double [] disparity_ref = dref;
// final int tilesX_ref = ref_w;
// final int tilesY_ref = ref_h;
final int tiles = tilesX*tilesY;
final int transform_size = tp.getTileSize();
final double [][] pXpYD= new double [tiles][];
......@@ -2943,6 +2942,321 @@ public class OpticalFlow {
return pXpYD;
}
/**
 * Calculate pX, pY, Disparity triplets for the rotated scene to match a non-uniform grid
 * of a reference camera. May be used in iterative reversal - uniform grid on the offset
 * camera (real or virtual) to non-uniform grid on the reference one.
 *
 * @param reference_pXpYD     pX, pY, Disparity triplets for the reference camera (uniform or
 *                            non-uniform). Invalid tiles are either null or have NaN disparity;
 *                            such tiles produce a null output entry.
 * @param scene_xyz           scene linear offset from the reference scene, in reference scene
 *                            coordinates (meters)
 * @param scene_atr           scene azimuth, tilt, roll offset relative to the reference scene
 * @param reference_QuadClt   reference scene instance (provides tile geometry and ERS correction)
 * @param scene_QuadClt       scene instance passed through to the ERS coordinate transform
 *                            (may be null - NOTE(review): getImageCoordinatesERS() is called with
 *                            null elsewhere in this file; confirm it tolerates null)
 * @return per-tile {pX, pY, Disparity} of the other scene, measured from the sensor top-left
 *         corner; null entries for invalid tiles or tiles that project outside the sensor window
 */
public static double [][] transformToScenePxPyD(
        final double [][] reference_pXpYD, // invalid tiles - NaN in disparity. Should be no nulls, no NaN disparity
        final double []   scene_xyz,       // camera center in world (reference) coordinates
        final double []   scene_atr,       // camera orientation relative to world (reference) frame
        final QuadCLT     reference_QuadClt,
        final QuadCLT     scene_QuadClt)
{
    boolean debug_ers  = false; // true; // false; // true; // true; //11.01.2022
    boolean ignore_ers = false; // false;
    TileProcessor tp = reference_QuadClt.getTileProcessor();
    final int tilesX = tp.getTilesX();
    final int tilesY = tp.getTilesY();
    final int tiles = tilesX * tilesY;
    final double [][] pXpYD = new double [tiles][];
    final ErsCorrection ersReferenceCorrection = reference_QuadClt.getErsCorrection();
    if (ignore_ers) {
        // zero out the ERS velocities so the transform ignores rolling-shutter motion
        ersReferenceCorrection.setErsDt(
                ZERO3,  // double [] ers_xyz_dt,
                ZERO3); // double [] ers_atr_dt);
    }
    ersReferenceCorrection.setupERS(); // just in case - setUP using instance parameters
    if (debug_ers) {
        System.out.println("reference: " + reference_QuadClt.getImageName());
        ersReferenceCorrection.printVectors(null, null);
    }
    // sensor window (pixels) used to discard projections that fall outside the scene view
    final int scene_width  = ersReferenceCorrection.getSensorWH()[0];
    final int scene_height = ersReferenceCorrection.getSensorWH()[1];
    final Thread[] threads = ImageDtt.newThreadArray(ImageDtt.THREADS_MAX);
    final AtomicInteger ai = new AtomicInteger(0);
    for (int ithread = 0; ithread < threads.length; ithread++) {
        threads[ithread] = new Thread() {
            public void run() {
                for (int nTile = ai.getAndIncrement(); nTile < tiles; nTile = ai.getAndIncrement()) {
                    // skip null entries and NaN disparities (invalid reference tiles)
                    if ((reference_pXpYD[nTile] != null) && !Double.isNaN(reference_pXpYD[nTile][2])) {
                        double disparity = reference_pXpYD[nTile][2]; // negative disparity is passed through as-is
                        double centerX   = reference_pXpYD[nTile][0];
                        double centerY   = reference_pXpYD[nTile][1];
                        pXpYD[nTile] = ersReferenceCorrection.getImageCoordinatesERS( // ersCorrection - reference
                                scene_QuadClt, // QuadCLT cameraQuadCLT, // camera station that got image to be matched
                                centerX,       // double px, // pixel coordinate X in the reference view
                                centerY,       // double py, // pixel coordinate Y in the reference view
                                disparity,     // double disparity, // reference disparity
                                true,          // boolean distortedView, // this camera view is distorted (diff.rect), false - rectilinear
                                ZERO3,         // double [] reference_xyz, // this view position in world coordinates (typically ZERO3)
                                ZERO3,         // double [] reference_atr, // this view orientation relative to world frame (typically ZERO3)
                                true,          // boolean distortedCamera, // camera view is distorted (false - rectilinear)
                                scene_xyz,     // double [] camera_xyz, // camera center in world coordinates
                                scene_atr,     // double [] camera_atr, // camera orientation relative to world frame
                                LINE_ERR);     // double line_err) // threshold error in scan lines (1.0)
                        if (pXpYD[nTile] != null) {
                            // discard tiles projected outside the sensor window
                            if (    (pXpYD[nTile][0] < 0.0) ||
                                    (pXpYD[nTile][1] < 0.0) ||
                                    (pXpYD[nTile][0] > scene_width) ||
                                    (pXpYD[nTile][1] > scene_height)) {
                                pXpYD[nTile] = null;
                            }
                        }
                    }
                }
            }
        };
    }
    ImageDtt.startAndJoin(threads);
    return pXpYD;
}
/**
 * Iteratively build a reference pX, pY, Disparity grid that, when transformed to the offset
 * (virtual) scene, lands on that scene's uniform tile grid. Starts from the uniform reference
 * grid, transforms it to the scene, bins the scene points into an expanded tile grid, then
 * inter/extrapolates (weighted linear fit) the reference coordinates back onto uniform scene
 * tile centers, refining num_refines times.
 *
 * @param ref_pXpYD_0       initial reference pXpYD grid - used for debug display only
 * @param disparity_ref     per-tile reference disparity (NaN - invalid tile); also the source
 *                          for bilinear disparity re-interpolation on each refine pass
 * @param scene_xyz         camera center in world (reference) coordinates
 * @param scene_atr         camera orientation relative to world (reference) frame
 * @param reference_QuadClt reference scene instance (tile geometry, ERS correction)
 * @param around            half-size (in tiles) of the search neighborhood for interpolation
 * @param sigma             Gaussian weight sigma (pixels) for the weighted fit
 * @param num_refines       number of refinement passes after the initial uniform pass
 * @param debugSuffix       non-null enables debug hyperstack display with this title suffix
 * @return per-tile {pX, pY, Disparity} for the reference camera (null for tiles with no data)
 */
public static double [][] transformFromVirtual(
        double [][]       ref_pXpYD_0,
        double []         disparity_ref,
        final double []   scene_xyz, // camera center in world (reference) coordinates
        final double []   scene_atr, // camera orientation relative to world (reference) frame
        final QuadCLT     reference_QuadClt,
        final int         around, // 2 search around for interpolation
        final double      sigma,
        final int         num_refines,
        final String      debugSuffix){
    final boolean debug = debugSuffix != null;
    final double sigma2 = 2 * sigma * sigma;
    final double normal_damping = 0.001; // pull to horizontal if not enough data
    final double [] damping = new double [] {normal_damping, normal_damping};
    TileProcessor tp = reference_QuadClt.getTileProcessor();
    final int tilesX = tp.getTilesX();
    final int tilesY = tp.getTilesY();
    final int tiles = tilesY * tilesX;
    // expanded ("around") grid so scene points slightly outside the view still contribute
    final int tilesX_around = tilesX + 2 * around;
    final int tilesY_around = tilesY + 2 * around;
    final int tiles_around = tilesY_around * tilesX_around;
    final int transform_size = tp.getTileSize();
    final Thread[] threads = ImageDtt.newThreadArray(ImageDtt.THREADS_MAX);
    final AtomicInteger ai = new AtomicInteger(0);
    int initial_capacity = 4;
    final int min_samples = 1;
    final String debugTitle = debug ? reference_QuadClt.getImageName() + "-" + debugSuffix : null;
    String [] debug_frame_titles = {"X", "Y", "D"};
    // num_refines+2 slices per frame: refine passes 0..num_refines plus the initial grid ("ref")
    // (was num_refines+1 - one title short of the debug_data slice count below)
    String [] debug_titles = new String[num_refines + 2];
    for (int i = 0; i <= num_refines; i++) {
        debug_titles[i] = i + "";
    }
    debug_titles[num_refines + 1] = "ref";
    final double [][][] debug_data = debug ? new double [3][num_refines + 2][tiles] : null;
    if (debug) {
        for (int f = 0; f < debug_data.length; f++) {
            for (int i = 0; i < debug_data[f].length; i++) {
                Arrays.fill(debug_data[f][i], Double.NaN);
            }
        }
    }
    // per-expanded-tile lists of scene-point indices used for local interpolation
    final ArrayList<ArrayList<Integer>> interp_list = new ArrayList<ArrayList<Integer>>(tiles_around);
    for (int nTile = 0; nTile < tiles_around; nTile++) {
        interp_list.add(new ArrayList<Integer>(initial_capacity));
    }
    // create uniform grid for initial interpolations
    final double [][] reference_pXpYD = new double [tiles][];
    for (int nrefine = 0; nrefine <= num_refines; nrefine++) {
        if (nrefine == 0) {
            // pass 0: seed with the uniform reference grid at tile centers
            ai.set(0);
            for (int ithread = 0; ithread < threads.length; ithread++) {
                threads[ithread] = new Thread() {
                    public void run() {
                        for (int nTile = ai.getAndIncrement(); nTile < tiles; nTile = ai.getAndIncrement()) if (!Double.isNaN(disparity_ref[nTile])){
                            double disparity = disparity_ref[nTile]; // negative disparity is used as-is
                            int tileY = nTile / tilesX;
                            int tileX = nTile % tilesX;
                            double centerX = (tileX + 0.5) * transform_size; // - shiftX;
                            double centerY = (tileY + 0.5) * transform_size; // - shiftY;
                            reference_pXpYD[nTile] = new double[] {centerX, centerY, disparity};
                        }
                    }
                };
            }
            ImageDtt.startAndJoin(threads);
        } else {
            // get scene pXpYD corresponding to reference_pXpYD
            final double [][] scene_pXpYD = transformToScenePxPyD(
                    reference_pXpYD,   // final double [][] reference_pXpYD, // invalid tiles - NaN in disparity
                    scene_xyz,         // final double []   scene_xyz, // camera center in world (reference) coordinates
                    scene_atr,         // final double []   scene_atr, // camera orientation relative to world (reference) frame
                    reference_QuadClt, // final QuadCLT     reference_QuadClt,
                    null);             // final QuadCLT     scene_QuadClt
            // next-iteration grid; entries stay null where no interpolation was possible
            final double [][] reference_pXpYD_next = new double [tiles][];
            ai.set(0);
            for (int ithread = 0; ithread < threads.length; ithread++) {
                threads[ithread] = new Thread() {
                    public void run() {
                        for (int nTile = ai.getAndIncrement(); nTile < tiles_around; nTile = ai.getAndIncrement()) {
                            interp_list.get(nTile).clear(); // clear each list
                        }
                    }
                };
            }
            ImageDtt.startAndJoin(threads);
            // bin scene points into expanded-grid tiles; not multithreaded (lists are not thread-safe)
            for (int nTile = 0; nTile < tiles; nTile++) if (scene_pXpYD[nTile] != null) {
                int tileX = (int) Math.floor (scene_pXpYD[nTile][0] / transform_size);
                int tileY = (int) Math.floor (scene_pXpYD[nTile][1] / transform_size);
                int tileX_around = tileX + around;
                int tileY_around = tileY + around;
                if ((tileX_around >= 0) && (tileY_around >= 0) && (tileX_around < tilesX_around) && (tileY_around < tilesY_around)) {
                    int tile_around = tileX_around + tilesX_around * tileY_around;
                    interp_list.get(tile_around).add(nTile);
                }
            }
            // interpolate reference_pXpYD_next
            ai.set(0);
            for (int ithread = 0; ithread < threads.length; ithread++) {
                threads[ithread] = new Thread() {
                    public void run() {
                        PolynomialApproximation pa = new PolynomialApproximation();
                        for (int nTile = ai.getAndIncrement(); nTile < tiles; nTile = ai.getAndIncrement()) {
                            int tileY = nTile / tilesX;
                            int tileX = nTile % tilesX;
                            double centerX = (tileX + 0.5) * transform_size; // - shiftX;
                            double centerY = (tileY + 0.5) * transform_size; // - shiftY;
                            int tileX_around = tileX + around;
                            int tileY_around = tileY + around;
                            // count available samples in the (2*around+1)^2 neighborhood
                            int num_samples = 0;
                            for (int dy = -around; dy <= around; dy++) {
                                for (int dx = -around; dx <= around; dx++) {
                                    int tile_around = (tileX_around + dx) + tilesX_around * (tileY_around + dy);
                                    num_samples += interp_list.get(tile_around).size();
                                }
                            }
                            int mindx = 0;
                            if (num_samples >= min_samples) {
                                // inter/extrapolate with regularization (for few samples) and weights
                                double [][][] mdata = new double [num_samples][3][];
                                for (int dy = -around; dy <= around; dy++) {
                                    for (int dx = -around; dx <= around; dx++) {
                                        int tile_around = (tileX_around + dx) + tilesX_around * (tileY_around + dy);
                                        for (int nst : interp_list.get(tile_around)) {
                                            double [] ref_xyd   = reference_pXpYD[nst];
                                            double [] scene_xyd = scene_pXpYD[nst];
                                            double x = scene_xyd[0] - centerX;
                                            double y = scene_xyd[1] - centerY;
                                            double w = Math.exp (-(x*x + y*y) / sigma2); // Gaussian weight by scene-plane distance
                                            mdata[mindx][0] = new double [2];
                                            mdata[mindx][0][0] = x;
                                            mdata[mindx][0][1] = y;
                                            mdata[mindx][1] = new double [2]; // [3];
                                            mdata[mindx][1][0] = ref_xyd[0];
                                            mdata[mindx][1][1] = ref_xyd[1];
                                            // mdata[mindx][1][2] = ref_xyd[2];
                                            mdata[mindx][2] = new double [1];
                                            mdata[mindx][2][0] = w;
                                            mindx++;
                                        }
                                    }
                                }
                                double[][] approx2d = pa.quadraticApproximation(
                                        mdata,
                                        true,    // boolean forceLinear, // use linear approximation
                                        damping, // double [] damping, null OK
                                        -1);     // debug level
                                double px = approx2d[0][2]; // constant term of the linear fit = value at (centerX, centerY)
                                double py = approx2d[1][2];
                                // bilinearly interpolate disparity from disparity_ref at the fitted reference position
                                double tx = px / transform_size - 0.5;
                                double ty = py / transform_size - 0.5;
                                int tx0 = (int) Math.floor(tx);
                                int ty0 = (int) Math.floor(ty);
                                int tx1 = tx0 + 1;
                                int ty1 = ty0 + 1;
                                double kx = tx - tx0;
                                double ky = ty - ty0;
                                // clamp to the tile grid, saturating the interpolation weight at the edge
                                if (tx0 < 0) {
                                    tx0 = 0;
                                    tx1 = tx0 + 1;
                                    kx = 0;
                                } else if (tx1 >= tilesX) {
                                    tx1 = tilesX - 1;
                                    tx0 = tx1 - 1;
                                    kx = 1.0;
                                }
                                if (ty0 < 0) {
                                    ty0 = 0;
                                    ty1 = ty0 + 1;
                                    ky = 0;
                                } else if (ty1 >= tilesY) {
                                    ty1 = tilesY - 1;
                                    ty0 = ty1 - 1;
                                    ky = 1.0;
                                }
                                double d =
                                        (1-kx)*(1-ky)*disparity_ref[tx0 + tilesX*ty0] +
                                        (  kx)*(1-ky)*disparity_ref[tx1 + tilesX*ty0] +
                                        (1-kx)*(  ky)*disparity_ref[tx0 + tilesX*ty1] +
                                        (  kx)*(  ky)*disparity_ref[tx1 + tilesX*ty1];
                                reference_pXpYD_next[nTile] = new double[] {px, py, d};
                            }
                        }
                    }
                };
            }
            ImageDtt.startAndJoin(threads);
            System.arraycopy(
                    reference_pXpYD_next,
                    0,
                    reference_pXpYD,
                    0,
                    tiles);
            // TODO: refine disparity from disparity_ref interpolation
        } // if (nrefine == 0) else
        if (debug) {
            for (int nTile = 0; nTile < tiles; nTile++) if (reference_pXpYD[nTile] != null) {
                debug_data[0][nrefine][nTile] = reference_pXpYD[nTile][0];
                debug_data[1][nrefine][nTile] = reference_pXpYD[nTile][1];
                debug_data[2][nrefine][nTile] = reference_pXpYD[nTile][2];
            }
        }
    }
    if (debug) {
        // last slice: the initial (uniform) grid for comparison; guard ref_pXpYD_0 - its null
        // pattern may differ from the refined grid's (was an unconditional dereference - NPE risk)
        for (int nTile = 0; nTile < tiles; nTile++) if (ref_pXpYD_0[nTile] != null) {
            debug_data[0][num_refines + 1][nTile] = ref_pXpYD_0[nTile][0];
            debug_data[1][num_refines + 1][nTile] = ref_pXpYD_0[nTile][1];
            debug_data[2][num_refines + 1][nTile] = ref_pXpYD_0[nTile][2];
        }
        ShowDoubleFloatArrays.showArraysHyperstack(
                debug_data,         // double[][][] pixels,
                tilesX,             // int width,
                debugTitle,         // String title, "time_derivs-rt"+diff_time_rt+"-rxy"+diff_time_rxy,
                debug_titles,       // String [] titles, // all slices*frames titles or just slice titles or null
                debug_frame_titles, // String [] frame_titles, // frame titles or null
                true);              // boolean show)
    }
    return reference_pXpYD; // ref_pXpYD_0; //
}
//TODO: refine inter-scene pose to accommodate refined disparity map
/**
......@@ -3229,8 +3543,8 @@ public class OpticalFlow {
public static double [][] transformCameraVew(
final String title,
final double [][] dsrbg_camera_in,
final double [] scene_xyz, // camera center in world coordinates
final double [] scene_atr, // camera orientation relative to world frame
final double [] scene_xyz, // camera center in world coordinates (in the reference camera coordinates)
final double [] scene_atr, // camera orientation relative to world frame (in the reference camera coordinates)
final QuadCLT scene_QuadClt,
final QuadCLT reference_QuadClt,
final int iscale)
......@@ -3242,8 +3556,8 @@ public class OpticalFlow {
final int tilesY = tp.getTilesY();
final int tiles = tilesX*tilesY;
final int transform_size = tp.getTileSize();
final int rel_num_passes = 10;
final int num_passes = transform_size; // * 2;
/// final int rel_num_passes = 10;
/// final int num_passes = transform_size; // * 2;
final int stilesX = iscale*tilesX;
final int stilesY = iscale*tilesY;
......@@ -5603,7 +5917,8 @@ public class OpticalFlow {
ref_index, //int ref_index,
new int [] {earliest_scene, last_index}, //int [] range,
debugLevel); // int debugLevel);
cuas_atr = new double [] {-center_ATR[0][0],-center_ATR[0][1],-center_ATR[0][2]};
// cuas_atr = new double [] {-center_ATR[0][0],-center_ATR[0][1],-center_ATR[0][2]};
cuas_atr = new double [] { center_ATR[0][0], center_ATR[0][1], center_ATR[0][2]};
}
if (generate_egomotion) {
......@@ -5749,6 +6064,8 @@ public class OpticalFlow {
disparity_fg = ds[0]; // combo_dsn_final[COMBO_DSN_INDX_DISP_FG];
strength_fg = ds[1];
// BG mode
double [] bg_lma = combo_dsn_final[COMBO_DSN_INDX_DISP_BG_ALL].clone();
double [] bg_str = combo_dsn_final[COMBO_DSN_INDX_STRENGTH].clone();
......@@ -6483,6 +6800,7 @@ public class OpticalFlow {
null, // testr, // null, // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
clt_parameters, // CLTParameters clt_parameters,
constant_disparity, // double [] disparity_ref,
null, // double [][] ref_pXpYD, // alternative to disparity_ref when reference is not uniform
ZERO3, // final double [] scene_xyz, // camera center in world coordinates
ZERO3, // new double[] {.1,0.1,.1}, // ZERO3, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[ref_index], // final QuadCLT scene,
......@@ -6501,6 +6819,7 @@ public class OpticalFlow {
null, // testr, // null, // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
clt_parameters, // CLTParameters clt_parameters,
constant_disparity, // double [] disparity_ref,
null, // double [][] ref_pXpYD, // alternative to disparity_ref when reference is not uniform
ZERO3, // final double [] scene_xyz, // camera center in world coordinates
ZERO3, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[ref_index], // final QuadCLT scene,
......@@ -6581,6 +6900,7 @@ public class OpticalFlow {
null, // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
clt_parameters, // CLTParameters clt_parameters,
ds_fg_virt[0], // fg_disparity, // double [] disparity_ref,
null, // double [][] ref_pXpYD, // alternative to disparity_ref when reference is not uniform
xyz_offset, // ZERO3, // final double [] scene_xyz, // camera center in world coordinates
ZERO3, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[ref_index], // final QuadCLT scene,
......@@ -6599,6 +6919,7 @@ public class OpticalFlow {
null, // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
clt_parameters, // CLTParameters clt_parameters,
ds_fg_virt[0], // fg_disparity, // double [] disparity_ref,
null, // double [][] ref_pXpYD, // alternative to disparity_ref when reference is not uniform
xyz_offset, // ZERO3, // final double [] scene_xyz, // camera center in world coordinates
ZERO3, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[ref_index], // final QuadCLT scene,
......@@ -6626,6 +6947,7 @@ public class OpticalFlow {
null, // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
clt_parameters, // CLTParameters clt_parameters,
bg_disparity, // double [] disparity_ref,
null, // double [][] ref_pXpYD, // alternative to disparity_ref when reference is not uniform
ZERO3, // final double [] scene_xyz, // camera center in world coordinates
ZERO3, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[ref_index], // final QuadCLT scene,
......@@ -6644,6 +6966,7 @@ public class OpticalFlow {
null, // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
clt_parameters, // CLTParameters clt_parameters,
bg_disparity, // double [] disparity_ref,
null, // double [][] ref_pXpYD, // alternative to disparity_ref when reference is not uniform
ZERO3, // final double [] scene_xyz, // camera center in world coordinates
ZERO3, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[ref_index], // final QuadCLT scene,
......@@ -7214,6 +7537,7 @@ public class OpticalFlow {
null, // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
clt_parameters, // CLTParameters clt_parameters,
disparity_fg, // double [] disparity_ref,
null, // double [][] ref_pXpYD, // alternative to disparity_ref when reference is not uniform
scene_xyz, // final double [] scene_xyz, // camera center in world coordinates
scene_atr, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[nscene], // final QuadCLT scene,
......@@ -7238,6 +7562,7 @@ public class OpticalFlow {
null, // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
clt_parameters, // CLTParameters clt_parameters,
disparity_fg, // double [] disparity_ref,
null, // double [][] ref_pXpYD, // alternative to disparity_ref when reference is not uniform
scene_xyz, // final double [] scene_xyz, // camera center in world coordinates
scene_atr, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[nscene], // final QuadCLT scene,
......@@ -7359,6 +7684,7 @@ public class OpticalFlow {
null, // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
clt_parameters, // CLTParameters clt_parameters,
disparity_fg, // double [] disparity_ref,
null, // double [][] ref_pXpYD, // alternative to disparity_ref when reference is not uniform
scene_xyz, // final double [] scene_xyz, // camera center in world coordinates
scene_atr, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[nscene], // final QuadCLT scene,
......@@ -7382,6 +7708,7 @@ public class OpticalFlow {
null, // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
clt_parameters, // CLTParameters clt_parameters,
disparity_fg, // double [] disparity_ref,
null, // double [][] ref_pXpYD, // alternative to disparity_ref when reference is not uniform
scene_xyz, // final double [] scene_xyz, // camera center in world coordinates
scene_atr, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[nscene], // final QuadCLT scene,
......@@ -7522,6 +7849,7 @@ public class OpticalFlow {
}
if (mode_cuas) {
suffix+="-CUAS"; // add properties too? include offsets
suffix+=String.format("%6f:%6f:%6f", stereo_atr[0],stereo_atr[1],stereo_atr[2]);
}
if (!mb_en) {
suffix+="-NOMB"; // no motion blur
......@@ -7535,7 +7863,10 @@ public class OpticalFlow {
for (int i = 0; i < num_sens; i++) if (((sensor_mask >> i) & 1) != 0) channels[nch++] = i;
ImageStack stack_scenes = null;
int dbg_scene = -95;
double [][] ref_pXpYD = transformToScenePxPyD( // now should work with offset ref_scene
double [][] ref_pXpYD;
double [][] ref_pXpYD_or_null = null; // debugging cuas mode keeping old
if (mode_cuas) { // && (dbg_scene > 0)) {
double [][] ref_pXpYD_0 = transformToScenePxPyD( // now should work with offset ref_scene
fov_tiles, // final Rectangle [] extra_woi, // show larger than sensor WOI (or null)
ref_disparity, // final double [] disparity_ref, // invalid tiles - NaN in disparity
ZERO3, // stereo_xyz, // ZERO3, // final double [] scene_xyz, // camera center in world coordinates
......@@ -7544,6 +7875,34 @@ public class OpticalFlow {
quadCLTs[ref_index], // final QuadCLT reference_QuadClt, // now - may be null - for testing if scene is rotated ref
threadsMax); // int threadsMax)
int around = 2;
double around_sigma = 4.0;
int num_virtual_refines = 3;
String debugSuffix= null; // "virtual";
ref_pXpYD= transformFromVirtual(
ref_pXpYD_0, // double [][] ref_pXpYD_0,
ref_disparity, // double [] disparity_ref,
stereo_xyz, // final double [] scene_xyz, // camera center in world (reference) coordinates
stereo_atr, // final double [] scene_atr, // camera orientation relative to world (reference) frame
quadCLTs[ref_index], // final QuadCLT reference_QuadClt,
around, // final int around, // 2 search around for interpolation
around_sigma, // final double sigma,
num_virtual_refines, // final int num_refines,
debugSuffix); // final String debugSuffix)
ref_pXpYD_or_null = ref_pXpYD;
quadCLTs[ref_index].getErsCorrection().setupERS();
System.out.println("Calculated virtual_PxPyD");
} else {
ref_pXpYD = transformToScenePxPyD( // now should work with offset ref_scene
fov_tiles, // final Rectangle [] extra_woi, // show larger than sensor WOI (or null)
ref_disparity, // final double [] disparity_ref, // invalid tiles - NaN in disparity
ZERO3, // stereo_xyz, // ZERO3, // final double [] scene_xyz, // camera center in world coordinates
ZERO3, // stereo_atr, // ZERO3, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[ref_index], // final QuadCLT scene_QuadClt,
quadCLTs[ref_index], // final QuadCLT reference_QuadClt, // now - may be null - for testing if scene is rotated ref
threadsMax); // int threadsMax)
// ref_pXpYD_or_null = ref_pXpYD; // remove after debug!. Yes, that spoils!
}
for (int nscene = 0; nscene < quadCLTs.length ; nscene++) if (quadCLTs[nscene] != null){
if (nscene== dbg_scene) {
System.out.println("renderSceneSequence(): nscene = "+nscene);
......@@ -7572,7 +7931,7 @@ public class OpticalFlow {
scene_atr = ZERO3;
}
}
if (stereo_xyz != null) { // offset all, including reference scene - now always, it is never null
if (!mode_cuas && (stereo_xyz != null)) { // offset all, including reference scene - now always, it is never null
double [][] combo_xyzatr = ErsCorrection.combineXYZATR(
stereo_xyz, // double [] reference_xyz,
stereo_atr, // double [] reference_atr,
......@@ -7612,6 +7971,7 @@ public class OpticalFlow {
null, // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
clt_parameters, // CLTParameters clt_parameters,
ref_disparity, // double [] disparity_ref,
ref_pXpYD_or_null, // double [][] ref_pXpYD, // alternative to disparity_ref when reference is not uniform
// motion blur compensation
mb_tau, // double mb_tau, // 0.008; // time constant, sec
mb_max_gain, // double mb_max_gain, // 5.0; // motion blur maximal gain (if more - move second point more than a pixel
......@@ -7634,6 +7994,7 @@ public class OpticalFlow {
fov_tiles, // testr, // null, // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
clt_parameters, // CLTParameters clt_parameters,
ref_disparity, // double [] disparity_ref,
ref_pXpYD_or_null, // double [][] ref_pXpYD, // alternative to disparity_ref when reference is not uniform
// not used, just as null/not null now
// null means uniform grid, no view transform. even with 0 rot ERS was changing results
((!corr_raw_ers && (mode3d<0))? null:scene_xyz), // final double [] scene_xyz, // camera center in world coordinates
......
......@@ -876,6 +876,8 @@ public class QuadCLT extends QuadCLTCPU {
null, // final Rectangle full_woi_in, // show larger than sensor WOI in tiles (or null)
clt_parameters, // CLTParameters clt_parameters,
disparity_ref, // double [] disparity_ref,
null, // double [][] ref_pXpYD, // alternative to disparity_ref when reference is not uniform
OpticalFlow.ZERO3, // final double [] scene_xyz, // camera center in world coordinates
OpticalFlow.ZERO3, // final double [] scene_atr, // camera orientation relative to world frame
ref_scene, // final QuadCLT scene,
......@@ -1254,6 +1256,7 @@ public class QuadCLT extends QuadCLTCPU {
final Rectangle full_woi_in, // show larger than sensor WOI in tiles (or null)
CLTParameters clt_parameters,
double [] disparity_ref,
double [][] ref_pXpYD, // alternative to disparity_ref when reference is not uniform
final double [] scene_xyz, // camera center in world coordinates
final double [] scene_atr, // camera orientation relative to world frame
final QuadCLT scene,
......@@ -1269,6 +1272,7 @@ public class QuadCLT extends QuadCLTCPU {
full_woi_in, // show larger than sensor WOI in tiles (or null)
clt_parameters,
disparity_ref,
ref_pXpYD, // double [][] ref_pXpYD, // alternative to disparity_ref when reference is not uniform
// motion blur compensation
0.0, // double mb_tau, // 0.008; // time constant, sec
0.0, // mb_max_gain, // 5.0; // motion blur maximal gain (if more - move second point more than a pixel
......@@ -1313,6 +1317,7 @@ public class QuadCLT extends QuadCLTCPU {
final Rectangle full_woi_in, // show larger than sensor WOI in tiles (or null)
CLTParameters clt_parameters,
double [] disparity_ref,
double [][] ref_pXpYD, // alternative to disparity_ref when reference is not uniform
// motion blur compensation
double mb_tau, // 0.008; // time constant, sec
double mb_max_gain, // 5.0; // motion blur maximal gain (if more - move second point more than a pixel
......@@ -1328,6 +1333,17 @@ public class QuadCLT extends QuadCLTCPU {
int threadsMax,
final int debugLevel){
double [][] pXpYD;
if (ref_pXpYD != null) { // cuas mode, ref_pXpYD defines offset reference scene
pXpYD=OpticalFlow.transformToScenePxPyD(
ref_pXpYD, // final double [][] reference_pXpYD, // invalid tiles - NaN in disparity. Should be no nulls, no NaN disparity
scene_xyz, // final double [] scene_xyz, // camera center in world (reference) coordinates
scene_atr, // final double [] scene_atr, // camera orientation relative to world (reference) frame
ref_scene, // final QuadCLT reference_QuadClt)
scene); // final QuadCLT scene_QuadClt)
scene.getErsCorrection().setupERS(); // NEW - did not help
// ref_scene.getErsCorrection().setupERS(); // just in case - setup using instance parameters - inside
} else {
if ((scene_xyz == null) || (scene_atr == null)) {
scene_xyz = new double[3];
scene_atr = new double[3];
......@@ -1349,6 +1365,7 @@ public class QuadCLT extends QuadCLTCPU {
ref_scene, // final QuadCLT reference_QuadClt, // now - may be null - for testing if scene is rotated ref
threadsMax); // int threadsMax)
}
}
int rendered_width = scene.getErsCorrection().getSensorWH()[0];
if (full_woi_in != null) {
rendered_width = full_woi_in.width * GPUTileProcessor.DTT_SIZE;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment