Commit b9f88154 authored by Andrey Filippov's avatar Andrey Filippov

Matching image pairs

parent fec6933a
......@@ -1347,7 +1347,38 @@ public class EyesisCorrectionParameters {
return true;
}
// get list of source files in a directory
public String[] selectSourceFileInSet(String setdir, int debugLevel) {
int num_chn_main = numSubCameras;
int num_chn_aux = ((aux_camera != null)? aux_camera.numSubCameras : 0);
int num_chn_files = num_chn_main + num_chn_aux;
String [] extensions = getSourceSuffixes();// ={this.sourceSuffix};
String [] prefixes = getSourcePrefixes();
extensions[0] = sourceSuffix;
prefixes[0] = sourcePrefix;
MultipleExtensionsFileFilter setFilter = new MultipleExtensionsFileFilter(prefixes,extensions,"Image sets");
File fsetdir = new File(setdir);
ArrayList<File> setFilesList = new ArrayList<File>(); // list of set files
File [] setChnFiles = fsetdir.listFiles(setFilter);
if (setChnFiles == null) { // missing or unreadable directory
return new String[0];
}
int num_match = setChnFiles.length;
if ( (num_match == num_chn_files) || // all files for main and aux
(num_match == num_chn_main) || // only main camera files
(num_match == num_chn_aux)) // only aux camera files
{ // only use sets of exact number of files
for (File f: setChnFiles) {
setFilesList.add(f);
}
}
String [] sourcePaths = new String[setFilesList.size()];
for (int nFile = 0; nFile < sourcePaths.length; nFile++) {
sourcePaths[nFile]= setFilesList.get(nFile).getPath();
}
return sourcePaths;
}
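// For reference: a minimal, standalone sketch of the same selection rule used above - accept a set
// directory only when the number of matching files equals the main, aux, or main+aux channel count.
// The suffix filter and the channel counts below are placeholders, not the project classes.
class SetSelectSketch {
static String [] listSet(java.io.File dir, String suffix, int chnMain, int chnAux) {
java.io.File [] files = dir.listFiles((d, name) -> name.endsWith(suffix));
if (files == null) return new String[0]; // missing or unreadable directory
int n = files.length;
if ((n != (chnMain + chnAux)) && (n != chnMain) && (n != chnAux)) {
return new String[0]; // only sets with the exact number of files are used
}
String [] paths = new String[n];
for (int i = 0; i < n; i++) paths[i] = files[i].getPath();
return paths;
}
}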
public boolean selectSourceFiles(boolean allFiles) {
return selectSourceFiles(allFiles, 1); // debug level 1 - modify here
......
......@@ -294,6 +294,11 @@ public class EyesisCorrections {
// FIXME: Use only this PixelMapping's channels, do not mix exposures from main/aux!
public double [] calcReferenceExposures(int debugLevel){
String [] paths=this.correctionsParameters.getSourcePaths();
return calcReferenceExposures(paths, debugLevel);
}
public double [] calcReferenceExposures(String [] paths,int debugLevel){
// String [] paths=this.correctionsParameters.getSourcePaths();
double [] exposures=new double [paths.length];
if (this.correctionsParameters.exposureCorrectionMode<2){
for (int nFile=0;nFile<paths.length;nFile++) {
......
......@@ -699,6 +699,7 @@ private Panel panel1,
addButton("LIST extrinsics", panelClt5, color_report);
addButton("DSI histogram", panelClt5, color_report);
addButton("ML recalc", panelClt5, color_process);
addButton("Inter Test", panelClt5, color_stop);
plugInFrame.add(panelClt5);
}
......@@ -5099,7 +5100,14 @@ private Panel panel1,
CLT_PARAMETERS.batch_run = true;
batchLwir();
return;
/* ======================================================================== */
} else if (label.equals("Inter Test")) {
DEBUG_LEVEL=MASTER_DEBUG_LEVEL;
EYESIS_CORRECTIONS.setDebug(DEBUG_LEVEL);
CLT_PARAMETERS.batch_run = true;
testInterScene();
return;
/* ======================================================================== */
} else if (label.equals("CLT rig edit")) {
DEBUG_LEVEL=MASTER_DEBUG_LEVEL;
......@@ -5700,6 +5708,9 @@ private Panel panel1,
return true;
}
public boolean getPairImages2() {
if (!prepareRigImages()) return false;
String configPath=getSaveCongigPath();
......@@ -6378,7 +6389,96 @@ private Panel panel1,
}
public boolean testInterScene() {
long startTime=System.nanoTime();
// load needed sensor and kernels files
if (!prepareRigImages()) return false;
String configPath=getSaveCongigPath();
if (configPath.equals("ABORT")) return false;
setAllProperties(PROPERTIES); // batchRig may save properties with the model. Extrinsics will be updated, others should be set here
if (DEBUG_LEVEL > -2){
System.out.println("++++++++++++++ Testing Interscene processing ++++++++++++++");
}
if (CLT_PARAMETERS.useGPU()) { // only init GPU instances if it is used
if (GPU_TILE_PROCESSOR == null) {
try {
GPU_TILE_PROCESSOR = new GPUTileProcessor(CORRECTION_PARAMETERS.tile_processor_gpu);
} catch (Exception e) {
System.out.println("Failed to initialize GPU class");
// TODO Auto-generated catch block
e.printStackTrace();
return false;
} //final int debugLevel);
}
if (CLT_PARAMETERS.useGPU(false) && (QUAD_CLT != null) && (GPU_QUAD == null)) { // if GPU main is needed
try {
GPU_QUAD = GPU_TILE_PROCESSOR.new GpuQuad(
QUAD_CLT,
4,
3);
} catch (Exception e) {
System.out.println("Failed to initialize GpuQuad class");
// TODO Auto-generated catch block
e.printStackTrace();
return false;
} //final int debugLevel);
QUAD_CLT.setGPU(GPU_QUAD);
}
/*
if (CLT_PARAMETERS.useGPU(true) && (QUAD_CLT_AUX != null) && (GPU_QUAD_AUX == null)) { // if GPU AUX is needed
try {
GPU_QUAD_AUX = GPU_TILE_PROCESSOR. new GpuQuad(//
QUAD_CLT_AUX,
4,
3);
} catch (Exception e) {
System.out.println("Failed to initialize GpuQuad class");
// TODO Auto-generated catch block
e.printStackTrace();
return false;
} //final int debugLevel);
QUAD_CLT_AUX.setGPU(GPU_QUAD_AUX);
}*/
}
try {
TWO_QUAD_CLT.TestInterScene(
QUAD_CLT, // QuadCLT quadCLT_main,
// QUAD_CLT_AUX, // QuadCLT quadCLT_aux,
CLT_PARAMETERS, // EyesisCorrectionParameters.DCTParameters dct_parameters,
DEBAYER_PARAMETERS, //EyesisCorrectionParameters.DebayerParameters debayerParameters,
COLOR_PROC_PARAMETERS, //EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
COLOR_PROC_PARAMETERS_AUX, //EyesisCorrectionParameters.ColorProcParameters colorProcParameters_aux,
CHANNEL_GAINS_PARAMETERS, //CorrectionColorProc.ColorGainsParameters channelGainParameters,
RGB_PARAMETERS, //EyesisCorrectionParameters.RGBParameters rgbParameters,
EQUIRECTANGULAR_PARAMETERS, // EyesisCorrectionParameters.EquirectangularParameters equirectangularParameters,
PROPERTIES, // Properties properties,
THREADS_MAX, //final int threadsMax, // maximal number of threads to launch
UPDATE_STATUS, //final boolean updateStatus,
DEBUG_LEVEL);
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
} //final int debugLevel);
if (configPath!=null) {
saveTimestampedProperties( // save config again
configPath, // full path or null
null, // use as default directory if path==null
true,
PROPERTIES);
}
System.out.println("batchRig(): Processing finished at "+
IJ.d2s(0.000000001*(System.nanoTime()-startTime),3)+" sec, --- Free memory="+
Runtime.getRuntime().freeMemory()+" (of "+Runtime.getRuntime().totalMemory()+")");
return true;
}
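// The GPU objects above are created lazily and then reused on subsequent runs. A generic sketch of
// that guard pattern (the names below are placeholders, not the project globals):
class LazyGpuInitSketch {
static Object gpuProcessor = null; // stands in for GPU_TILE_PROCESSOR
static boolean ensureGpu(boolean useGpu) {
if (!useGpu) return true; // GPU disabled - nothing to initialize
if (gpuProcessor != null) return true; // already initialized - reuse the instance
try {
gpuProcessor = new Object(); // placeholder for new GPUTileProcessor(...)
} catch (Exception e) {
e.printStackTrace();
return false; // initialization failed - caller aborts
}
return true;
}
}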
public boolean exportMLData() {
long startTime=System.nanoTime();
......
......@@ -590,7 +590,7 @@ public class GPUTileProcessor {
}
public class GpuQuad{ // quad camera description
public final QuadCLT quadCLT;
public QuadCLT quadCLT;
public final int img_width;
public final int img_height;
public final int kernels_hor; // int kernels_hor,
......@@ -657,6 +657,16 @@ public class GPUTileProcessor {
private boolean geometry_correction_set = false;
private boolean geometry_correction_vector_set = false;
public int gpu_debug_level = 1;
// should only be updated with a QuadCLT instance for the same physical camera (same geometry and kernels)
public void updateQuadCLT(final QuadCLT quadCLT) {
this.quadCLT = quadCLT;
resetGeometryCorrection();
resetGeometryCorrectionVector();
bayer_set = false;
}
public QuadCLT getQuadCLT( ) {
return this.quadCLT;
}
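// Intended use (a sketch, assuming a shared GpuQuad and QuadCLT scenes built for the same physical
// camera, as in QuadCLT.getTileRBG() later in this commit): temporarily point the GpuQuad at another
// scene, run the GPU processing, then restore the previous binding.
void runOnScene(GPUTileProcessor.GpuQuad gpuQuad, QuadCLT otherScene, Runnable gpuJob) {
QuadCLT saved = gpuQuad.getQuadCLT();
if (saved != otherScene) {
gpuQuad.updateQuadCLT(otherScene); // resets geometry correction, vectors and bayer data
}
gpuJob.run(); // GPU processing for otherScene
if ((saved != null) && (saved != otherScene)) {
gpuQuad.updateQuadCLT(saved); // restore the previous scene
}
}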
public GpuQuad(
final QuadCLT quadCLT,
// final int img_width,
......
......@@ -614,6 +614,15 @@ public class CLTPass3d{
}
calc_disparity_combo = calc_disparity.clone(); // for now - just clone, can be modified separately and combined with hor/vert
}
// set disparity/strength directly, bypassing calculations
public void setCalcDisparityStrength(
double [] disparity,
double [] strength) {
this.calc_disparity = disparity;
this.strength = strength;
calc_disparity_combo = calc_disparity.clone();
}
/**
* Replaces current combo disparity for tiles that are weak and do not have any neighbor within disparity range from this one
......
......@@ -25,7 +25,9 @@
package com.elphel.imagej.tileprocessor;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Properties;
import org.apache.commons.math3.complex.Quaternion;
import org.apache.commons.math3.geometry.euclidean.threed.Rotation;
......@@ -36,21 +38,175 @@ import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
import Jama.Matrix;
public class ErsCorrection extends GeometryCorrection {
public static RotationConvention ROT_CONV = RotationConvention.FRAME_TRANSFORM;
public static double THRESHOLD = 1E-10;
static final String XYZ_PREFIX = "xyz";
static final String ATR_PREFIX = "atr";
static final String ERS_PREFIX = "ers";
static final String SCENES_PREFIX = "scenes";
static final String ERS_XYZ_PREFIX = ERS_PREFIX + "_xyz";
static final String ERS_XYZ_DT_PREFIX = ERS_PREFIX + "_xyz_dt";
static final String ERS_XYZ_D2T_PREFIX = ERS_PREFIX + "_xyz_d2t";
static final String ERS_ATR_DT_PREFIX = ERS_PREFIX + "_atr_dt";
static final String ERS_ATR_D2T_PREFIX = ERS_PREFIX + "_atr_d2t";
static final RotationConvention ROT_CONV = RotationConvention.FRAME_TRANSFORM;
static final double THRESHOLD = 1E-10;
// parameters for the ERS distortion calculation
public double [] ers_wxyz_center; // world camera XYZ (meters) for the lens center (in camera coordinates, typically 0)
public double [] ers_wxyz_center_dt; // world camera Vx, Vy, Vz (m/s)
public double [] ers_wxyz_center_d2t; // world camera Vx, Vy, Vz (m/s^2)
public double [] ers_watr_center_dt; // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
public double [] ers_watr_center_d2t; // camera rotations (az, tilt, roll in radians/s^2, corresponding to the frame center)
// absolute position and orientation of this camera in World coordinates
public double [] camera_xyz = new double[3]; // camera center in world coordinates
public double [] camera_atr = new double[3]; // camera orientation relative to world frame
public HashMap <String, XyzAtr> scenes_poses = new HashMap <String, XyzAtr>(); // scene timestamp as a key
// Save above data through properties, separately scenes_poses(relative to this), this camera pose camera pose
// Rotation rotation; // (double[][] m, double THRESHOLD)
double [][] ers_xyz; // per scan line
double [][] ers_xyz_dt; // linear velocities per scan line
Quaternion[] ers_quaternion; // per scan line
Quaternion[] ers_quaternion_dt; // per scan line
double [][] ers_atr; // azimuth-tilt-roll per scan line
double [][] ers_atr_dt; // angular velocities per scan line
private double [][] ers_xyz; // per scan line
private double [][] ers_xyz_dt; // linear velocities per scan line
private Quaternion[] ers_quaternion; // per scan line
private Quaternion[] ers_quaternion_dt; // per scan line
private double [][] ers_atr; // azimuth-tilt-roll per scan line
private double [][] ers_atr_dt; // angular velocities per scan line
public void setPose(
double [] camera_xyz,
double [] camera_atr) {
this.camera_xyz = camera_xyz;
this.camera_atr = camera_atr;
}
public double [] getCameraXYZ() {
return camera_xyz;
}
public double [] getCameraATR() {
return camera_atr;
}
public void setPropertiesPose(String prefix, Properties properties){
properties.setProperty(prefix+XYZ_PREFIX, String.format("%f, %f, %f", camera_xyz[0], camera_xyz[1], camera_xyz[2]));
properties.setProperty(prefix+ATR_PREFIX, String.format("%f, %f, %f", camera_atr[0], camera_atr[1], camera_atr[2]));
}
public boolean getPropertiesPose(String prefix,Properties properties){
boolean got_data = false;
if (properties.getProperty(prefix+XYZ_PREFIX)!=null) {camera_xyz = parseDoublesCSV(properties.getProperty(prefix+XYZ_PREFIX)); got_data=true;}
if (properties.getProperty(prefix+ATR_PREFIX)!=null) {camera_atr = parseDoublesCSV(properties.getProperty(prefix+ATR_PREFIX)); got_data=true;}
return got_data;
}
public void setPropertiesERS(String prefix, Properties properties){
properties.setProperty(prefix+ERS_XYZ_PREFIX, String.format("%f, %f, %f", ers_wxyz_center[0], ers_wxyz_center[1], ers_wxyz_center[2]));
properties.setProperty(prefix+ERS_XYZ_DT_PREFIX, String.format("%f, %f, %f", ers_wxyz_center_dt[0], ers_wxyz_center_dt[1], ers_wxyz_center_dt[2]));
properties.setProperty(prefix+ERS_XYZ_D2T_PREFIX, String.format("%f, %f, %f", ers_wxyz_center_d2t[0], ers_wxyz_center_d2t[1], ers_wxyz_center_d2t[2]));
properties.setProperty(prefix+ERS_ATR_DT_PREFIX, String.format("%f, %f, %f", ers_watr_center_dt[0], ers_watr_center_dt[1], ers_watr_center_dt[2]));
properties.setProperty(prefix+ERS_ATR_D2T_PREFIX, String.format("%f, %f, %f", ers_watr_center_d2t[0], ers_watr_center_d2t[1], ers_watr_center_d2t[2]));
}
public boolean getPropertiesERS(String prefix,Properties properties){
boolean got_data = false;
if (properties.getProperty(prefix+ERS_XYZ_PREFIX)!=null) {ers_wxyz_center = parseDoublesCSV(properties.getProperty(prefix+ERS_XYZ_PREFIX)); got_data=true;}
if (properties.getProperty(prefix+ERS_XYZ_DT_PREFIX)!=null) {ers_wxyz_center_dt = parseDoublesCSV(properties.getProperty(prefix+ERS_XYZ_DT_PREFIX)); got_data=true;}
if (properties.getProperty(prefix+ERS_XYZ_D2T_PREFIX)!=null) {ers_wxyz_center_d2t = parseDoublesCSV(properties.getProperty(prefix+ERS_XYZ_D2T_PREFIX)); got_data=true;}
if (properties.getProperty(prefix+ERS_ATR_DT_PREFIX)!=null) {ers_watr_center_dt = parseDoublesCSV(properties.getProperty(prefix+ERS_ATR_DT_PREFIX)); got_data=true;}
if (properties.getProperty(prefix+ERS_ATR_D2T_PREFIX)!=null) {ers_watr_center_d2t = parseDoublesCSV(properties.getProperty(prefix+ERS_ATR_D2T_PREFIX)); got_data=true;}
if (got_data) {
setupERS(); // calculate arrays
}
return got_data;
}
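// The pose and ERS vectors are stored as comma-separated doubles in the Properties file. A
// standalone round-trip sketch of that convention (not the project code):
class PoseCsvSketch {
static void put(java.util.Properties props, String key, double [] v) {
props.setProperty(key, String.format("%f, %f, %f", v[0], v[1], v[2]));
}
static double [] get(java.util.Properties props, String key) {
String s = props.getProperty(key);
if (s == null) return null;
String [] parts = s.split(",");
double [] v = new double [parts.length];
for (int i = 0; i < v.length; i++) v[i] = Double.parseDouble(parts[i]); // parseDouble ignores surrounding spaces
return v;
}
public static void main(String [] args) {
java.util.Properties props = new java.util.Properties();
put(props, "scene_xyz", new double [] {0.1, -0.2, 3.0});
System.out.println(java.util.Arrays.toString(get(props, "scene_xyz"))); // [0.1, -0.2, 3.0]
}
}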
public void setPropertiesScenes(String prefix, Properties properties){
String [] timestamps = getScenes();
for (String k : timestamps) {
properties.setProperty(prefix+SCENES_PREFIX+"_"+k, getScene(k).toString());
}
}
// do not forget to reset scenes when switching to a new "this" scene.
public boolean getPropertiesScenes(String parent_prefix,Properties properties){
boolean got_data = false;
ArrayList<String> timestamps = new ArrayList<String>();
String prefix = parent_prefix+SCENES_PREFIX+"_";
for (Enumeration<?> e = properties.propertyNames(); e.hasMoreElements();) {
String key = (String) e.nextElement();
if (key.startsWith(prefix)) {
timestamps.add(key.substring(prefix.length()));
}
}
if (!timestamps.isEmpty()) {
got_data = true;
for (String ts:timestamps) {
addScene(ts, new XyzAtr(properties.getProperty(prefix+ts)));
}
}
return got_data;
}
//propertyNames()
public double [] parseDoublesCSV(String s) {
String[] snumbers = s.split(",");
double [] data = new double [snumbers.length];
for (int i = 0; i < data.length; i++) {
data[i] = Double.parseDouble(snumbers[i]);
}
return data;
}
public void resetScenes() {
scenes_poses = new HashMap <String, XyzAtr>();
}
public void removeScene(String timestamp) {
scenes_poses.remove(timestamp);
}
public String [] getScenes() {
int num_scenes = scenes_poses.size();
String [] scenes = new String[num_scenes];
int i = 0;
for (String ts:scenes_poses.keySet()) {
scenes[i++] = ts;
}
return scenes;
}
public void addScene(String timestamp, XyzAtr scene) {
scenes_poses.put(timestamp, scene);
}
public void addScene(String timestamp, double [] xyz, double [] atr) {
scenes_poses.put(timestamp, new XyzAtr(xyz, atr));
}
public XyzAtr getScene(String timestamp) { // null if not found
return scenes_poses.get(timestamp);
}
public double[] getSceneXYZ(String timestamp) {
XyzAtr scene = scenes_poses.get(timestamp);
if (scene == null) return null;
return scene.getXYZ();
}
public double[] getSceneATR(String timestamp) {
XyzAtr scene = scenes_poses.get(timestamp);
if (scene == null) return null;
return scene.getATR();
}
/**
* Position+orientation (world XYZ, Azimuth, Tilt, Roll) of other scenes relative to the position of this camera.
* Positions/orientations are sampled during scanning of the center line
*/
HashMap <String, XyzAtr> scenes_poses = null;
public class XyzAtr {
double [] xyz;
double [] atr;
......@@ -58,14 +214,16 @@ public class ErsCorrection extends GeometryCorrection {
xyz = new double[3];
atr = new double[3];
}
public XyzAtr(double [] xyz, double [] atr) {
this.xyz = xyz;
this.atr = atr;
}
public XyzAtr(String s) {
ArrayList<Double> lxyzatr = new ArrayList<Double>();
for (String snumber : s.split(","))
lxyzatr.add(Double.parseDouble(snumber));
Double [] xyzatr = new Double[6];
xyzatr = lxyzatr.toArray(xyzatr);
xyz = new double [] {xyzatr[0],xyzatr[1],xyzatr[2]};
atr = new double [] {xyzatr[3],xyzatr[4],xyzatr[5]};
double [] d = parseDoublesCSV(s);
xyz = new double [] {d[0], d[1], d[2]};
atr = new double [] {d[3], d[4], d[5]};
}
public String toString() {
......@@ -90,8 +248,8 @@ public class ErsCorrection extends GeometryCorrection {
}
public ErsCorrection(GeometryCorrection gc) {
// use deep=true for the independent instance (clone), false - to "upgrade" GeometryCorrection to ErsCorrection
public ErsCorrection(GeometryCorrection gc, boolean deep) {
debugLevel = gc.debugLevel;
line_time = gc.line_time; // 26.5E-6; // duration of sensor scan line (for ERS)
pixelCorrectionWidth= gc.pixelCorrectionWidth; // 2592; // virtual camera center is at (pixelCorrectionWidth/2, pixelCorrectionHeight/2)
......@@ -110,6 +268,7 @@ public class ErsCorrection extends GeometryCorrection {
elevation = gc.elevation; // 0.0; // degrees, up - positive;
heading = gc.heading; // 0.0; // degrees, CW (from top) - positive
numSensors = gc.numSensors; // 4;
forward = gc.forward; // null;
right = gc.right; // null;
height = gc.height; // null;
......@@ -133,6 +292,91 @@ public class ErsCorrection extends GeometryCorrection {
extrinsic_corr = gc.extrinsic_corr; // ;
rigOffset = gc.rigOffset; // = null;
woi_tops = gc.woi_tops; // = null; // used to calculate scanline timing
if (deep) {
forward = clone1d(forward);
right = clone1d(right);
height = clone1d(height);
roll = clone1d(roll);
pXY0 = clone2d(pXY0);
XYZ_he = clone2d(XYZ_he);
XYZ_her = clone2d(XYZ_her);
rXY = clone2d(rXY);
rXY_ideal = clone2d(rXY_ideal);
rByRDist = clone1d(rByRDist); // probably it is not needed
extrinsic_corr = extrinsic_corr.clone();
if (rigOffset!=null) rigOffset = rigOffset.clone();
woi_tops = clone1d(woi_tops);
}
resetScenes(); // no scenes yet
// generate initial ers velocity and roll
setupERSfromExtrinsics();
}
public static double [] clone1d(double [] din){
if (din == null) return null;
return din.clone();
}
public static int [] clone1d(int [] din){
if (din == null) return null;
return din.clone();
}
public static boolean [] clone1d(boolean [] din){
if (din == null) return null;
return din.clone();
}
public static double [][] clone2d(double [][] din){
if (din == null) return null;
double [][] dout = new double [din.length][];
for (int i = 0; i < dout.length; i++) {
dout[i] = clone1d(din[i]);
}
return dout;
}
public static int [][] clone2d(int [][] din){
if (din == null) return null;
int [][] dout = new int [din.length][];
for (int i = 0; i < dout.length; i++) {
dout[i] = clone1d(din[i]);
}
return dout;
}
public static boolean [][] clone2d(boolean [][] din){
if (din == null) return null;
boolean [][] dout = new boolean [din.length][];
for (int i = 0; i < dout.length; i++) {
dout[i] = clone1d(din[i]);
}
return dout;
}
public static double [][][] clone3d(double [][][] din){
if (din == null) return null;
double [][][] dout = new double [din.length][][];
for (int i = 0; i < dout.length; i++) {
dout[i] = clone2d(din[i]);
}
return dout;
}
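// Why the deep copy above matters: clone() on a double[][] duplicates only the outer array, so the
// row arrays stay shared between the original and the copy. A tiny standalone illustration:
class CloneSketch {
public static void main(String [] args) {
double [][] a = {{1.0, 2.0}, {3.0, 4.0}};
double [][] shallow = a.clone(); // shares the row arrays with a
double [][] deep = new double [a.length][];
for (int i = 0; i < a.length; i++) deep[i] = a[i].clone();
shallow[0][0] = 99.0;
System.out.println(a[0][0]); // 99.0 - the shallow copy aliased the row
System.out.println(deep[0][0]); // 1.0 - the deep copy is independent
}
}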
// setup from extrinsics vector
public void setupERSfromExtrinsics()
{
double [] ersv = getCorrVector().getIMU();
setupERS(
new double [3], // double [] wxyz_center, // world camera XYZ (meters) for the frame center
new double [] {ersv[3], ersv[4], ersv[5]}, // double [] wxyz_center_dt, // world camera Vx, Vy, Vz (m/s)
new double [3], // double [] wxyz_center_d2t, // world camera Vx, Vy, Vz (m/s^2)
// double [] watr_center, // camera orientation (az, tilt, roll in radians, corresponding to the frame center)
// new double [] {ersv[1], ersv[0], ersv[2]}, // double [] watr_center_dt, // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
// REVERSING tilt sign !
new double [] {ersv[1], -ersv[0], ersv[2]}, // double [] watr_center_dt, // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
new double [3]); // double [] watr_center_d2t) // camera rotations (az, tilt, roll in radians/s^2, corresponding to the frame center)
}
public void setupERS(
......@@ -142,6 +386,16 @@ public class ErsCorrection extends GeometryCorrection {
// double [] watr_center, // camera orientation (az, tilt, roll in radians, corresponding to the frame center)
double [] watr_center_dt, // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
double [] watr_center_d2t) // camera rotations (az, tilt, roll in radians/s^2, corresponding to the frame center)
{
this.ers_wxyz_center = wxyz_center; // world camera XYZ (meters) for the lens center (in camera coordinates, typically 0)
this.ers_wxyz_center_dt = wxyz_center_dt; // world camera Vx, Vy, Vz (m/s)
this.ers_wxyz_center_d2t = wxyz_center_d2t; // world camera Vx, Vy, Vz (m/s^2)
this.ers_watr_center_dt = watr_center_dt; // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
this.ers_watr_center_d2t = watr_center_d2t; // camera rotations (az, tilt, roll in radians/s^2, corresponding to the frame center)
setupERS();
}
public void setupERS()
{
ers_xyz= new double [pixelCorrectionHeight][3];
ers_xyz_dt= new double [pixelCorrectionHeight][3];
......@@ -156,14 +410,14 @@ public class ErsCorrection extends GeometryCorrection {
Rotation rcenter0= new Rotation(RotationOrder.YXZ, ROT_CONV, 0.0, 0.0, 0.0);
Quaternion quat_center0 = new Quaternion (rcenter0.getQ0(),rcenter0.getQ1(),rcenter0.getQ2(),rcenter0.getQ3());
Quaternion quat_center1 = new Quaternion (0.0,watr_center_dt[1], watr_center_dt[0], watr_center_dt[2]); // angular velocity 1/s :tilt, az, roll
Quaternion quat_center2 = new Quaternion (0.0,watr_center_d2t[1], watr_center_d2t[0], watr_center_d2t[2]); // angular velocity 1/s :tilt, az, roll
Quaternion quat_center1 = new Quaternion (0.0,ers_watr_center_dt[1], ers_watr_center_dt[0], ers_watr_center_dt[2]); // angular velocity 1/s :tilt, az, roll
Quaternion quat_center2 = new Quaternion (0.0,ers_watr_center_d2t[1], ers_watr_center_d2t[0], ers_watr_center_d2t[2]); // angular velocity 1/s :tilt, az, roll
// integration to the bottom of the image
double dt = line_time;
double [] wxy0 = wxyz_center.clone();
double [] wxy1 = wxyz_center_dt.clone();
double [] wxy2 = wxyz_center_d2t.clone();
double [] wxy0 = ers_wxyz_center.clone();
double [] wxy1 = ers_wxyz_center_dt.clone();
double [] wxy2 = ers_wxyz_center_d2t.clone();
// bottom half rotations
dt = line_time;
Quaternion q0 = quat_center0.multiply(1.0); // clone() orientation
......@@ -217,8 +471,8 @@ public class ErsCorrection extends GeometryCorrection {
}
}
dt = -line_time;
wxy0 = wxyz_center.clone();
wxy1 = wxyz_center_dt.clone();
wxy0 = ers_wxyz_center.clone();
wxy1 = ers_wxyz_center_dt.clone();
for (int h = cent_h; h >= 0; h--) {
for (int i = 0; i < 3; i++) {
ers_xyz[h][i] = wxy0[i];
......@@ -231,6 +485,79 @@ public class ErsCorrection extends GeometryCorrection {
}
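// setupERS() integrates the camera position and orientation line by line, down and then up from the
// frame center. A minimal standalone sketch of that forward-Euler idea for the translational part
// (line_time and the derivatives below are illustrative numbers, not instance values):
class ErsIntegrationSketch {
public static void main(String [] args) {
int lines = 8; // image height in scan lines
double line_time = 26.5E-6; // seconds per scan line
double [] vel = {0.0, 0.0, 1.0}; // center velocity, m/s
double [] acc = {0.0, 0.0, 0.0}; // center acceleration, m/s^2
double [][] ers_xyz = new double [lines][3];
int cent_h = lines / 2;
double [] xyz = new double[3];
double [] v = vel.clone();
for (int h = cent_h; h < lines; h++) { // bottom half; the top half would use dt = -line_time
ers_xyz[h] = xyz.clone();
for (int i = 0; i < 3; i++) {
xyz[i] += v[i] * line_time;
v[i] += acc[i] * line_time;
}
}
System.out.printf("line %d offset z = %.9f m%n", lines - 1, ers_xyz[lines - 1][2]); // 3 * line_time
}
}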
/**
* Match other camera px, py, disparity to the reference one
* @param px horizontal pixel coordinate (right) of the reference camera view
* @param py vertical pixel coordinate (down) of the reference camera view
* @param disparity nominal disparity (pixels) of the reference camera view
* @param distortedView true: Radially-distorted reference view, false - rectilinear
* @param reference_xyz reference view lens position during centerline acquisition in world coordinates (typically zero), null - use instance global
* @param reference_atr reference view orientation during centerline acquisition in world frame (typically zero), null - use instance global
* @param distortedCamera true: Radially-distorted camera view, false - rectilinear
* @param camera_xyz camera lens position during centerline acquisition in world coordinates, null - use instance global
* @param camera_atr camera orientation during centerline acquisition in world frame, null - use instance global
* @param line_err iterate until the line (pY) correction is below this value
* @return {px, py, disparity } (right, down) or null if behind the camera
*/
public double [] getImageCoordinatesERS(
QuadCLT cameraQuadCLT, // camera station that got the image to be matched
double px, // pixel coordinate X in this camera view
double py, // pixel coordinate Y in this camera view
double disparity, // this view disparity
boolean distortedView, // This camera view is distorted (diff.rect), false - rectilinear
double [] reference_xyz, // this view position in world coordinates (typically zero3)
double [] reference_atr, // this view orientation relative to world frame (typically zero3)
boolean distortedCamera, // camera view is distorted (false - rectilinear)
double [] camera_xyz, // camera center in world coordinates
double [] camera_atr, // camera orientation relative to world frame
double line_err) // threshold error in scan lines (1.0)
{
if (reference_xyz == null) reference_xyz = this.camera_xyz;
if (reference_atr == null) reference_atr = this.camera_atr;
// Find world coordinates of the reference pixel
double [] xyzw = getWorldCoordinatesERS( // {x - left,y - up, z (0 at camera, negative away), 1} for real, {x,y,z,0} - for infinity
px,
py,
disparity,
distortedView, // correct distortion (will need corrected background too !)
reference_xyz, // camera center in world coordinates
reference_atr); // camera orientation relative to world frame
if (xyzw == null) {
return null;
}
if (xyzw[3] == 0.0) { // infinity
/*
if (xyzw[2] > 0) {
for (int i = 0; i < 3; i++) {
xyzw[i] = -xyzw[i];
}
}
*/
}
if (xyzw[2] > 0) {
return null; // can not match object behind the camera
}
ErsCorrection ers_other = this;
if (cameraQuadCLT != null) {
ers_other = cameraQuadCLT.getErsCorrection();
}
if (camera_xyz == null) camera_xyz = ers_other.camera_xyz;
if (camera_atr == null) camera_atr = ers_other.camera_atr;
double [] pXpYD = ers_other.getImageCoordinatesERS( // USED in lwir
xyzw,
distortedCamera,
camera_xyz, // camera center in world coordinates
camera_atr, // camera orientation relative to world frame
line_err); // threshold error in scan lines (1.0)
return pXpYD;
}
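// Usage sketch for the new two-scene projection path (mirrors how it is called from
// QuadCLT.transformCameraVew() later in this commit); camera_xyz/camera_atr are the other scene's
// pose relative to the reference, and both scenes are assumed already restored from their models:
static double [] matchPixel(QuadCLT reference_scene, QuadCLT other_scene,
double px, double py, double disparity,
double [] camera_xyz, double [] camera_atr) {
double [] zero3 = {0.0, 0.0, 0.0};
return reference_scene.getErsCorrection().getImageCoordinatesERS(
other_scene, // camera station whose image is to be matched
px, py, disparity, // pixel and disparity in the reference view
true, // reference view is radially distorted
zero3, zero3, // reference pose (typically zero)
true, // other view is radially distorted
camera_xyz, camera_atr, // other scene pose in the reference world frame
1.0); // iterate until the pY correction is below one scan line
// returns {px, py, disparity} in the other scene, or null if behind the camera
}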
/**
* Get real world coordinates from pixel coordinates and nominal disparity
* @param px horizontal pixel coordinate (right)
......@@ -241,7 +568,22 @@ public class ErsCorrection extends GeometryCorrection {
* @param camera_atr camera orientation during centerline acquisition in world frame
* @return a vector {x, y, z, 1.0 } in meters. For infinity : {x, y, z, 0}
*/
public double [] getWorldCoordinatesERS( // USED in lwir
public double [] getWorldCoordinatesERS(
double px,
double py,
double disparity,
boolean correctDistortions)// correct distortion (will need corrected background too !)
{
return getWorldCoordinatesERS(
px,
py,
disparity,
correctDistortions,// correct distortion (will need corrected background too !)
camera_xyz, // camera center in world coordinates
camera_atr);
}
public double [] getWorldCoordinatesERS(
double px,
double py,
double disparity,
......@@ -249,6 +591,9 @@ public class ErsCorrection extends GeometryCorrection {
double [] camera_xyz, // camera center in world coordinates
double [] camera_atr) // camera orientation relative to world frame
{
if (Double.isNaN(disparity)) {
return null;
}
double pXcd = px - 0.5 * this.pixelCorrectionWidth;
double pYcd = py - 0.5 * this.pixelCorrectionHeight;
double rD = Math.sqrt(pXcd*pXcd + pYcd*pYcd)*0.001*this.pixelSize; // distorted radius in a virtual center camera
......@@ -291,6 +636,10 @@ public class ErsCorrection extends GeometryCorrection {
world_xyz = (is_infinity) ? cam_center_world : cam_center_world.add(new Vector3D(camera_xyz));
double [] wxyz = world_xyz.toArray();
double [] wxyz4 = {wxyz[0],wxyz[1],wxyz[2], 1.0};
if (Double.isNaN(wxyz4[0])) { // no-op branch, apparently kept as a breakpoint hook for debugging NaN results
wxyz4[0] = Double.NaN;
}
if (is_infinity) {
wxyz4[3] = 0.0;
}
......@@ -304,8 +653,21 @@ public class ErsCorrection extends GeometryCorrection {
* @param camera_xyz camera lens position during centerline acquisition in world coordinates
* @param camera_atr camera orientation during centerline acquisition in world frame
* @param line_err iterate until the line (pY) correction is below this value
* @return {disparity, px, py} (right, down)
* @return {px, py, disparity } (right, down)
*/
public double [] getImageCoordinatesERS( // USED in lwir
double [] xyzw,
boolean correctDistortions, // correct distortion (will need corrected background too !)
double line_err) // threshold error in scan lines (1.0)
{
return getImageCoordinatesERS( // USED in lwir
xyzw,
correctDistortions, // correct distortion (will need corrected background too !)
camera_xyz, // camera center in world coordinates
camera_atr, // camera orientation relative to world frame
line_err); // threshold error in scan lines (1.0)
}
public double [] getImageCoordinatesERS( // USED in lwir
double [] xyzw,
boolean correctDistortions, // correct distortion (will need corrected background too !)
......@@ -327,7 +689,7 @@ public class ErsCorrection extends GeometryCorrection {
double err = pixelCorrectionHeight / 2;
double [] dxy = null;
// multiple iterations starting with no ERS distortions
while (err > line_err) {
for (int ntry = 0; (ntry < 100) && (err > line_err); ntry++) {
// current camera offset in the centerline camera frame
Vector3D cam_now_local = new Vector3D(ers_xyz[line]);
Vector3D cam_center_now_local = (is_infinity) ? cam_center_local : cam_center_local.subtract(cam_now_local); // skip translation for infinity
......@@ -346,7 +708,7 @@ public class ErsCorrection extends GeometryCorrection {
double rD2RND = correctDistortions?getRDistByR(rND/this.distortionRadius):1.0;
double px = pXc * rD2RND + 0.5 * this.pixelCorrectionWidth; // distorted coordinates relative to the (0.5 * this.pixelCorrectionWidth, 0.5 * this.pixelCorrectionHeight)
double py = pYc * rD2RND + 0.5 * this.pixelCorrectionHeight; // in pixels
dxy = new double [] {disparity, px, py};
dxy = new double [] {px, py, disparity};
int line1 = (int) Math.round(py);
if (line1 < 0) {
line1 = 0;
......
......@@ -40,6 +40,7 @@ import java.util.Properties;
import com.elphel.imagej.cameras.CLTParameters;
import com.elphel.imagej.cameras.ColorProcParameters;
import com.elphel.imagej.cameras.EyesisCorrectionParameters;
import com.elphel.imagej.common.DoubleGaussianBlur;
import com.elphel.imagej.common.ShowDoubleFloatArrays;
import com.elphel.imagej.correction.CorrectionColorProc;
import com.elphel.imagej.correction.EyesisCorrections;
......@@ -62,6 +63,1436 @@ public class QuadCLT extends QuadCLTCPU {
correctionsParameters
);
}
public QuadCLT(QuadCLTCPU pq, String name) {
super (pq, name);
if (pq instanceof QuadCLT) {
this.gpuQuad = ((QuadCLT) pq).gpuQuad; // careful when switching - reset Geometry, vectors, bayer images. Kernels should be the same
}
}
public QuadCLT restoreFromModel(
CLTParameters clt_parameters,
ColorProcParameters colorProcParameters, //
int threadsMax,
int debugLevel)
{
final int debugLevelInner=clt_parameters.batch_run? -2: debugLevel;
// String set_name = image_name; // prevent from being overwritten?
String jp4_copy_path= correctionsParameters.selectX3dDirectory(
this.image_name, // quad timestamp. Will be ignored if correctionsParameters.use_x3d_subdirs is false
correctionsParameters.jp4SubDir,
true, // smart,
true); //newAllowed, // save
String [] sourceFiles = correctionsParameters.selectSourceFileInSet(jp4_copy_path, debugLevel);
SetChannels [] set_channels=setChannels(
null, // single set name
sourceFiles,
debugLevel);
// sets set name to jp4, overwrite
set_channels[0].set_name = this.image_name; // set_name;
double [] referenceExposures = null;
if (!colorProcParameters.lwir_islwir) {
referenceExposures = eyesisCorrections.calcReferenceExposures(sourceFiles, debugLevel);
}
int [] channelFiles = set_channels[0].fileNumber();
boolean [][] saturation_imp = (clt_parameters.sat_level > 0.0)? new boolean[channelFiles.length][] : null;
double [] scaleExposures = new double[channelFiles.length];
ImagePlus [] imp_srcs = conditionImageSet(
clt_parameters, // EyesisCorrectionParameters.CLTParameters clt_parameters,
colorProcParameters, // ColorProcParameters colorProcParameters, //
sourceFiles, // String [] sourceFiles,
this.image_name, // String set_name,
referenceExposures, // double [] referenceExposures,
channelFiles, // int [] channelFiles,
scaleExposures, // output // double [] scaleExposures
saturation_imp, // output // boolean [][] saturation_imp,
threadsMax, // int threadsMax,
debugLevelInner); // int debugLevel);
restoreDSI("-DSI_MAIN"); // "-DSI_COMBO", "-DSI_MAIN" (DSI_COMBO_SUFFIX, DSI_MAIN_SUFFIX)
restoreInterProperties( // restore properties for interscene processing (extrinsics, ers, ...)
null, // String path, // full name with extension or null to use x3d directory
// null, // Properties properties, // if null - will only save extrinsics)
debugLevel);
// showDSIMain();
return this; // can only be QuadCLT instance
}
public double [][] getDSRBG (){
return dsrbg;
}
public void setDSRBG(
CLTParameters clt_parameters,
int threadsMax, // maximal number of threads to launch
boolean updateStatus,
int debugLevel)
{
setDSRBG(
this.dsi[TwoQuadCLT.DSI_DISPARITY_MAIN],
this.dsi[TwoQuadCLT.DSI_STRENGTH_MAIN],
clt_parameters,
threadsMax,
updateStatus,
debugLevel);
}
public void setDSRBG(
double [] disparity,
double [] strength,
CLTParameters clt_parameters,
int threadsMax, // maximal number of threads to launch
boolean updateStatus,
int debugLevel)
{
double[][] rbg = getTileRBG(
clt_parameters,
disparity,
strength,
threadsMax, // maximal number of threads to launch
updateStatus,
debugLevel);
double [][] dsrbg = {
disparity,
strength,
rbg[0],rbg[1],rbg[2]};
this.dsrbg = dsrbg;
}
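// A sketch of how a per-timestamp scene could be brought up for interscene matching with the
// methods above (the timestamp string and the ALL-CAPS globals are placeholders for the plugin's
// static instances):
QuadCLT makeScene(String timestamp) {
QuadCLT scene = new QuadCLT(QUAD_CLT, timestamp); // shares kernels and GPU with the current instance
scene.restoreFromModel(CLT_PARAMETERS, COLOR_PROC_PARAMETERS, THREADS_MAX, DEBUG_LEVEL); // DSI, extrinsics, ERS
scene.setDSRBG(CLT_PARAMETERS, THREADS_MAX, UPDATE_STATUS, DEBUG_LEVEL); // attach {d, s, R, B, G} tile layers
return scene; // scene.getDSRBG() now returns the per-tile disparity/strength/color rows
}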
public double[][] getTileRBG(
CLTParameters clt_parameters,
double [] disparity,
double [] strength,
int threadsMax, // maximal number of threads to launch
boolean updateStatus,
int debugLevel)
{
CLTPass3d scan = new CLTPass3d(tp);
scan.setCalcDisparityStrength(
disparity,
strength);
boolean [] selection = new boolean [disparity.length];
for (int i = 0; i < disparity.length; i++) {
selection[i] = (!Double.isNaN(disparity[i]) && ((strength == null) || (strength[i] > 0)));
}
scan.setTileOpDisparity(selection, disparity);
// will work only with GPU
// reset bayer source, geometry correction/vector
//this.new_image_data = true;
QuadCLT savedQuadClt = gpuQuad.getQuadCLT();
if (savedQuadClt != this) {
gpuQuad.updateQuadCLT(this);
} else {
savedQuadClt = null;
}
setPassAvgRBGA( // get image from a single pass, return relative path for x3d // USED in lwir
clt_parameters, // CLTParameters CLTParameters clt_parameters,,
scan,
threadsMax, // maximal number of threads to launch
updateStatus,
debugLevel);
double [][] rgba = scan.getTilesRBGA();
if (debugLevel > -1) { // -2) {
String title = image_name+"-RBGA";
String [] titles = {"R","B","G","A"};
(new ShowDoubleFloatArrays()).showArrays(
rgba,
tp.getTilesX(),
tp.getTilesY(),
true,
title,
titles);
}
// Maybe restore by caller?
if (savedQuadClt != null) {
gpuQuad.updateQuadCLT(savedQuadClt);
}
return rgba;
}
public double [][] getOpticalFlow(
double disparity_min,
double disparity_max,
double [][] ds0,
double [][] ds1,
int debugLevel){
// double sigma = .3;
int rel_num_passes = 10;
int n = 2* tp.getTileSize();
double [][] ds0f = new double[2][];
double [][] ds1f = new double[2][];
for (int i = 0; i < 2; i++) {
// ds0f[i] = fillNaNGaps(ds0[i], n,sigma);
// ds1f[i] = fillNaNGaps(ds1[i], n,sigma);
ds0f[i] = fillNaNGaps(ds0[i], n, rel_num_passes, 100);
ds1f[i] = fillNaNGaps(ds1[i], n, rel_num_passes, 100);
}
for (int tile = 0; tile < ds0f[0].length; tile++) {
double d = ds0f[0][tile];
if ((d < disparity_min) || (d > disparity_max)) {
ds0f[0][tile] = Double.NaN;
ds0f[1][tile] = Double.NaN;
}
d = ds1f[0][tile];
if ((d < disparity_min) || (d > disparity_max)) {
ds1f[0][tile] = Double.NaN;
ds1f[1][tile] = Double.NaN;
}
}
double [][] ds0ff = new double[2][];
double [][] ds1ff = new double[2][];
for (int i = 0; i < 2; i++) {
// ds0ff[i] = fillNaNGaps(ds0f[i], n,sigma);
// ds1ff[i] = fillNaNGaps(ds1f[i], n,sigma);
ds0ff[i] = fillNaNGaps(ds0f[i], n, rel_num_passes, 100);
ds1ff[i] = fillNaNGaps(ds1f[i], n, rel_num_passes, 100);
}
if (debugLevel > 0) {
double [][] dbg_img = {
// ds0[1],ds0f[1],ds0ff[1],ds1[1],ds1f[1],ds1ff[1],
// ds0[0],ds0f[0],ds0ff[0],ds1[0],ds1f[0],ds1ff[0]};
ds0[1],ds1[1],ds0f[1],ds1f[1],ds0ff[1],ds1ff[1],
ds0[0],ds1[0],ds0f[0],ds1f[0],ds0ff[0],ds1ff[0]};
String title = "filled_nan_gaps";
// String [] titles = {
// "ds0[1]","ds0f[1]","ds0ff[1]","ds1[1]","ds1f[1]","ds1ff[1]",
// "ds0[0]","ds0f[0]","ds0ff[0]","ds1[0]","ds1f[0]","ds1ff[0]"};
String [] titles = {
"ds0[1]","ds1[1]","ds0f[1]","ds1f[1]","ds0ff[1]","ds1ff[1]",
"ds0[0]","ds1[0]","ds0f[0]","ds1f[0]","ds0ff[0]","ds1ff[0]"};
(new ShowDoubleFloatArrays()).showArrays(
dbg_img,
tp.getTilesX(),
tp.getTilesY(),
true,
title,
titles);
}
return null;
}
public double[] fillNaNGapsOld(
double [] data,
int n,
double sigma) {
double [] d = data.clone();
for (int i = 0; i < n; i++) {
d = (new DoubleGaussianBlur()).blurWithNaN(
d, // double[] pixels,
null, // double [] in_weight, // or null
tp.getTilesX(), // int width,
tp.getTilesY(), // int height,
sigma, // double sigmaX,
sigma, // double sigmaY,
0.00001); // double accuracy);
}
for (int i = 0; i < data.length; i++) {
if (!Double.isNaN(data[i])) {
d[i] = data[i];
}
}
return d;
}
public double[] fillNaNGaps(
double [] data,
int n,
int rel_num_passes,
int threadsMax)
{
int num_passes = n * rel_num_passes;
return tp.fillNaNs(
data, // double [] data,
tp.getTilesX(), //int width,
2 * n, // int grow,
0.5 * Math.sqrt(2.0), // double diagonal_weight, // relative to ortho
num_passes, // int num_passes,
threadsMax); // final int threadsMax) // maximal number of threads to launch
}
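// fillNaNGaps() delegates to tp.fillNaNs(); a standalone sketch of the general idea only - iteratively
// replace NaN tiles with the average of their defined neighbors - not the project implementation:
class NanFillSketch {
static double [] fillPass(double [] d, int width) { // one pass over a width x height tile grid
int height = d.length / width;
double [] out = d.clone();
for (int y = 0; y < height; y++) for (int x = 0; x < width; x++) {
int i = x + y * width;
if (!Double.isNaN(d[i])) continue;
double sum = 0.0; int n = 0;
if ((x > 0) && !Double.isNaN(d[i - 1])) { sum += d[i - 1]; n++; }
if ((x < (width - 1)) && !Double.isNaN(d[i + 1])) { sum += d[i + 1]; n++; }
if ((y > 0) && !Double.isNaN(d[i - width])) { sum += d[i - width]; n++; }
if ((y < (height - 1)) && !Double.isNaN(d[i + width])) { sum += d[i + width]; n++; }
if (n > 0) out[i] = sum / n;
}
return out;
}
static double [] fill(double [] d, int width, int num_passes) {
for (int p = 0; p < num_passes; p++) d = fillPass(d, width);
return d;
}
}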
public double[][][] get_pair(
double k_prev,
QuadCLT qprev,
double corr_scale, // = 0.75
int debug_level)
{
final int iscale = 8;
double ts = getTimeStamp();
double ts_prev = ts;
double [] zero3 = {0.0,0.0,0.0};
double [] camera_xyz0 = zero3.clone();
double [] camera_atr0 = zero3.clone();
ErsCorrection ersCorrection = (ErsCorrection) geometryCorrection;
System.out.println("\n"+image_name+":\n"+ersCorrection.extrinsic_corr.toString());
System.out.println(String.format("%s: ers_wxyz_center= %f, %f, %f", image_name,
ersCorrection.ers_wxyz_center[0], ersCorrection.ers_wxyz_center[1],ersCorrection.ers_wxyz_center[2] ));
System.out.println(String.format("%s: ers_wxyz_center_dt= %f, %f, %f", image_name,
ersCorrection.ers_wxyz_center_dt[0], ersCorrection.ers_wxyz_center_dt[1],ersCorrection.ers_wxyz_center_dt[2] ));
System.out.println(String.format("%s: ers_wxyz_center_d2t= %f, %f, %f", image_name,
ersCorrection.ers_wxyz_center_d2t[0], ersCorrection.ers_wxyz_center_d2t[1],ersCorrection.ers_wxyz_center_d2t[2] ));
System.out.println(String.format("%s: ers_watr_center_dt= %f, %f, %f", image_name,
ersCorrection.ers_watr_center_dt[0], ersCorrection.ers_watr_center_dt[1],ersCorrection.ers_watr_center_dt[2] ));
System.out.println(String.format("%s: ers_watr_center_d2t= %f, %f, %f", image_name,
ersCorrection.ers_watr_center_d2t[0], ersCorrection.ers_watr_center_d2t[1],ersCorrection.ers_watr_center_d2t[2] ));
double dt = 0.0;
if (qprev == null) {
qprev = this;
}
if (qprev != null) {
ts_prev = qprev.getTimeStamp();
dt = ts-ts_prev;
if (dt < 0) {
k_prev = (1.0-k_prev);
}
if (Math.abs(dt) > 0.15) { // at least two frames TODO: use number of lines* line_time * ...?
k_prev = 0.5;
System.out.println("Non-consecutive frames, dt = "+dt);
}
ErsCorrection ersCorrectionPrev = (ErsCorrection) (qprev.geometryCorrection);
double [] wxyz_center_dt_prev = ersCorrectionPrev.ers_wxyz_center_dt;
double [] watr_center_dt_prev = ersCorrectionPrev.ers_watr_center_dt;
double [] wxyz_delta = new double[3];
double [] watr_delta = new double[3];
for (int i = 0; i <3; i++) {
wxyz_delta[i] = - corr_scale * dt * (k_prev * wxyz_center_dt_prev[i] + (1.0-k_prev) * ersCorrection.ers_wxyz_center_dt[i]);
watr_delta[i] = - corr_scale * dt * (k_prev * watr_center_dt_prev[i] + (1.0-k_prev) * ersCorrection.ers_watr_center_dt[i]);
}
camera_xyz0 = wxyz_delta;
camera_atr0 = watr_delta;
}
int tilesX = tp.getTilesX();
int tilesY = tp.getTilesY();
String [] dsrbg_titles = {"d", "s", "r", "b", "g"};
double [][] dsrbg = this.transformCameraVew(
qprev, // QuadCLT camera_QuadClt,
camera_xyz0, // double [] camera_xyz, // camera center in world coordinates
camera_atr0, //double [] camera_atr, // camera orientation relative to world frame
iscale);
double [][][] pair = {getDSRBG(),dsrbg};
// combine this scene with warped previous one
if (debug_level > 0) {
String [] rtitles = new String[2* dsrbg_titles.length];
double [][] dbg_rslt = new double [rtitles.length][];
for (int i = 0; i < dsrbg_titles.length; i++) {
rtitles[2*i] = dsrbg_titles[i]+"0";
rtitles[2*i+1] = dsrbg_titles[i];
dbg_rslt[2*i] = pair[0][i];
dbg_rslt[2*i+1] = pair[1][i];
}
String title = image_name+"-"+qprev.image_name+"-dt"+dt;
(new ShowDoubleFloatArrays()).showArrays(
dbg_rslt,
tilesX,
tilesY,
true,
title,
rtitles);
}
return pair;
}
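// The initial pose guess for the previous scene blends the ERS velocities of the two frames and
// scales by the time difference: delta = -corr_scale * dt * (k_prev * v_prev + (1 - k_prev) * v_this).
// A small worked example of that formula (the numbers are illustrative only):
class PoseGuessSketch {
public static void main(String [] args) {
double corr_scale = 0.75; // same default as suggested by the get_pair() signature comment
double k_prev = 0.75; // weight of the previous frame's velocity
double dt = 0.1; // seconds between the two frames
double v_prev = 1.2, v_this = 1.0; // one component of ers_wxyz_center_dt, m/s
double delta = -corr_scale * dt * (k_prev * v_prev + (1.0 - k_prev) * v_this);
System.out.printf("camera offset guess: %.3f m%n", delta); // about -0.086 m
}
}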
public double [][] transformCameraVew(
QuadCLT camera_QuadClt,
double [] camera_xyz, // camera center in world coordinates
double [] camera_atr, // camera orientation relative to world frame
int iscale)
{
double line_err = 10.0; // 0.1; // BUG
int tilesX = tp.getTilesX();
int tilesY = tp.getTilesY();
int tiles = tilesX*tilesY;
int transform_size = tp.getTileSize();
int rel_num_passes = 10;
int num_passes = transform_size; // * 2;
double [] zero3 = {0.0,0.0,0.0};
int stilesX = iscale*tilesX;
int stilesY = iscale*tilesY;
int stiles = stilesX*stilesY;
double sigma = 0.5 * iscale;
double scale = 1.0 * iscale/transform_size;
double [][] dsrbg_camera = camera_QuadClt.getDSRBG();
double [][] ds = new double [dsrbg_camera.length][stiles];
for (int i = 0; i <ds.length; i++) {
for (int j = 0; j <ds[i].length; j++) {
ds[i][j] = Double.NaN;
}
}
ErsCorrection ersCorrection = getErsCorrection();
ersCorrection.setupERS(); // just in case - set up using instance parameters
double [] zbuffer = new double [tiles];
for (int tileY = 0; tileY < tilesY; tileY++) {
int stileY = iscale * tileY + iscale/2;
for (int tileX = 0; tileX < tilesX; tileX++) {
int stileX = iscale * tileX + iscale/2;
int nTile = tileX + tileY * tilesX;
double centerX = tileX * transform_size + transform_size/2; // - shiftX;
double centerY = tileY * transform_size + transform_size/2; // - shiftY;
double disparity = dsrbg_camera[DSRBG_DISPARITY][nTile];
if (disparity < 0) {
disparity = 0.0;
}
// found that there are tiles with strength == 0.0, while disparity is not NaN
if (!Double.isNaN(disparity) && (dsrbg_camera[DSRBG_STRENGTH][nTile] > 0.0)) {
double [] pXpYD = ersCorrection.getImageCoordinatesERS(
camera_QuadClt, // QuadCLT cameraQuadCLT, // camera station that got the image to be matched
centerX, // double px, // pixel coordinate X in this camera view
centerY, //double py, // pixel coordinate Y in this camera view
disparity, // double disparity, // this view disparity
true, // boolean distortedView, // This camera view is distorted (diff.rect), false - rectilinear
zero3, // double [] reference_xyz, // this view position in world coordinates (typically zero3)
zero3, // double [] reference_atr, // this view orientation relative to world frame (typically zero3)
true, // boolean distortedCamera, // camera view is distorted (false - rectilinear)
camera_xyz, // double [] camera_xyz, // camera center in world coordinates
camera_atr, // double [] camera_atr, // camera orientation relative to world frame
line_err); // double line_err) // threshold error in scan lines (1.0)
if (pXpYD != null) {
int px = (int) Math.round(pXpYD[0]/transform_size);
int py = (int) Math.round(pXpYD[1]/transform_size);
int spx = (int) Math.round(pXpYD[0]*scale);
int spy = (int) Math.round(pXpYD[1]*scale);
if ((px >= 0) && (py >= 0) && (px < tilesX) & (py < tilesY)) {
//Z-buffer
if (!(pXpYD[2] < zbuffer[px + py* tilesX])) {
zbuffer[px + py* tilesX] = pXpYD[2];
if ((spx >= 0) && (spy >= 0) && (spx < stilesX) & (spy < stilesY)) {
int sTile = spx + spy* stilesX;
ds[DSRBG_DISPARITY][sTile] = pXpYD[2]; //reduce*
for (int i = DSRBG_STRENGTH; i < dsrbg_camera.length; i++) {
ds[i][sTile] = dsrbg_camera[i][nTile]; // reduce *
}
}
}
}
}
}
}
}
//dsrbg_out[DSRBG_DISPARITY]
for (int i = 0; i < ds.length; i++) {
ds[i] = (new DoubleGaussianBlur()).blurWithNaN(
ds[i], // double[] pixels,
null, // double [] in_weight, // or null
stilesX, // int width,
stilesY, // int height,
sigma, // double sigmaX,
sigma, // double sigmaY,
0.01); // double accuracy);
}
double [][] dsrbg_out = new double [dsrbg_camera.length][tiles];
int [][] num_non_nan = new int [dsrbg_out.length] [tiles];
for (int stileY = 0; stileY < stilesY; stileY++) {
int tileY = stileY / iscale;
for (int stileX = 0; stileX < stilesX; stileX++) {
int tileX = stileX / iscale;
int stile = stileX + stileY * stilesX;
int tile = tileX + tileY * tilesX;
for (int i = 0; i < dsrbg_out.length; i++) {
double d = ds[i][stile];
if (!Double.isNaN(d)) {
num_non_nan[i][tile] ++;
dsrbg_out[i][tile] += d;
}
}
}
}
for (int i = 0; i < dsrbg_out.length; i++) {
for (int j = 0; j < tiles; j++) {
if (num_non_nan[i][j] == 0) {
dsrbg_out[i][j] = Double.NaN;
} else {
dsrbg_out[i][j]/=num_non_nan[i][j];
}
}
}
if (num_passes > 0) {
for (int i = 0; i < dsrbg_out.length; i++) {
dsrbg_out[i] = fillNaNGaps(dsrbg_out[i], num_passes, rel_num_passes, 100); // threadsMax);
}
}
return dsrbg_out;
}
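// Occlusion handling above keeps, per destination tile, only the projection with the largest
// disparity (the closest surface). A standalone sketch of that disparity z-buffer test:
class ZBufferSketch {
static void splat(double [] zbuffer, double [] out, int tile, double disparity, double value) {
if (!(disparity < zbuffer[tile])) { // also accepts the first write into a zero-filled buffer
zbuffer[tile] = disparity;
out[tile] = value;
}
}
public static void main(String [] args) {
double [] zb = new double[1]; // initialized to 0.0, as in transformCameraVew()
double [] out = new double[1];
splat(zb, out, 0, 1.5, 10.0); // far sample lands first
splat(zb, out, 0, 3.0, 20.0); // closer sample overwrites it
splat(zb, out, 0, 0.5, 30.0); // farther sample is rejected
System.out.println(out[0] + " @ disparity " + zb[0]); // 20.0 @ disparity 3.0
}
}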
@Deprecated
public double[][][] get_pair_old(
double k_prev,
QuadCLT qprev,
double corr_scale, // = 0.75
int debug_level)
{
final int iscale = 8;
double ts = getTimeStamp();
double ts_prev = ts;
double [] zero3 = {0.0,0.0,0.0};
double [] camera_xyz0 = zero3.clone();
double [] camera_atr0 = zero3.clone();
ErsCorrection ersCorrection = (ErsCorrection) geometryCorrection;
System.out.println("\n"+image_name+":\n"+ersCorrection.extrinsic_corr.toString());
System.out.println(String.format("%s: ers_wxyz_center= %f, %f, %f", image_name,
ersCorrection.ers_wxyz_center[0], ersCorrection.ers_wxyz_center[1],ersCorrection.ers_wxyz_center[2] ));
System.out.println(String.format("%s: ers_wxyz_center_dt= %f, %f, %f", image_name,
ersCorrection.ers_wxyz_center_dt[0], ersCorrection.ers_wxyz_center_dt[1],ersCorrection.ers_wxyz_center_dt[2] ));
System.out.println(String.format("%s: ers_wxyz_center_d2t= %f, %f, %f", image_name,
ersCorrection.ers_wxyz_center_d2t[0], ersCorrection.ers_wxyz_center_d2t[1],ersCorrection.ers_wxyz_center_d2t[2] ));
System.out.println(String.format("%s: ers_watr_center_dt= %f, %f, %f", image_name,
ersCorrection.ers_watr_center_dt[0], ersCorrection.ers_watr_center_dt[1],ersCorrection.ers_watr_center_dt[2] ));
System.out.println(String.format("%s: ers_watr_center_d2t= %f, %f, %f", image_name,
ersCorrection.ers_watr_center_d2t[0], ersCorrection.ers_watr_center_d2t[1],ersCorrection.ers_watr_center_d2t[2] ));
double dt = 0.0;
if (qprev == null) {
qprev = this;
}
if (qprev != null) {
ts_prev = qprev.getTimeStamp();
dt = ts-ts_prev;
if (dt < 0) {
k_prev = (1.0-k_prev);
}
if (Math.abs(dt) > 0.15) { // at least two frames TODO: use number of lines* line_time * ...?
k_prev = 0.5;
System.out.println("Non-consecutive frames, dt = "+dt);
}
ErsCorrection ersCorrectionPrev = (ErsCorrection) (qprev.geometryCorrection);
double [] wxyz_center_dt_prev = ersCorrectionPrev.ers_wxyz_center_dt;
double [] watr_center_dt_prev = ersCorrectionPrev.ers_watr_center_dt;
double [] wxyz_delta = new double[3];
double [] watr_delta = new double[3];
for (int i = 0; i <3; i++) {
wxyz_delta[i] = - corr_scale * dt * (k_prev * wxyz_center_dt_prev[i] + (1.0-k_prev) * ersCorrection.ers_wxyz_center_dt[i]);
watr_delta[i] = - corr_scale * dt * (k_prev * watr_center_dt_prev[i] + (1.0-k_prev) * ersCorrection.ers_watr_center_dt[i]);
}
camera_xyz0 = wxyz_delta;
camera_atr0 = watr_delta;
}
int tilesX = tp.getTilesX();
int tilesY = tp.getTilesY();
String [] dsrbg_titles = {"d", "s", "r", "b", "g"};
double [][] dsrbg0 = qprev.transformCameraVew( // previous camera view, ers only
zero3, // double [] camera_xyz, // camera center in world coordinates
zero3, // double [] camera_atr, // camera orientation relative to world frame
iscale);
if (debug_level > 1) {
String title0 = String.format("%s_DSRBG_ers-only",qprev.image_name); // previous frame, ERS-corrected only, no shift/rot
(new ShowDoubleFloatArrays()).showArrays(
dsrbg0,
tilesX,
tilesY,
true,
title0,
dsrbg_titles);
}
double [][] dsrbg = transformCameraVew(
camera_xyz0, // double [] camera_xyz, // camera center in world coordinates
camera_atr0, //double [] camera_atr, // camera orientation relative to world frame
iscale); //
if (debug_level > 1) {
String title = String.format("%s_%f:%f:%f_%f:%f:%f",image_name,
camera_xyz0[0],camera_xyz0[1],camera_xyz0[2],camera_atr0[0],camera_atr0[1],camera_atr0[2]);
(new ShowDoubleFloatArrays()).showArrays(
dsrbg,
tilesX,
tilesY,
true,
title,
dsrbg_titles);
}
double [][] dsrbg1 = this.transformCameraVew(
qprev, // QuadCLT camera_QuadClt,
camera_xyz0, // double [] camera_xyz, // camera center in world coordinates
camera_atr0, //double [] camera_atr, // camera orientation relative to world frame
iscale);
/// double [][][] pair = {dsrbg0,dsrbg};
/// double [][][] pair = {qprev.getDSRBG(),dsrbg};
/// double [][][] pair = {qprev.getDSRBG(),dsrbg1};
double [][][] pair = {getDSRBG(),dsrbg1};
// combine previous frame with this one
if (debug_level > 0) {
String [] rtitles = new String[2* dsrbg_titles.length];
double [][] dbg_rslt = new double [rtitles.length][];
for (int i = 0; i < dsrbg_titles.length; i++) {
rtitles[2*i] = dsrbg_titles[i]+"0";
rtitles[2*i+1] = dsrbg_titles[i];
dbg_rslt[2*i] = pair[0][i];
dbg_rslt[2*i+1] = pair[1][i];
}
String title = image_name+"-"+qprev.image_name+"-dt"+dt;
(new ShowDoubleFloatArrays()).showArrays(
dbg_rslt,
tilesX,
tilesY,
true,
title,
rtitles);
}
return pair;
}
@Deprecated
public double [][] transformCameraVew(
double [] camera_xyz, // camera center in world coordinates
double [] camera_atr, // camera orientation relative to world frame
int iscale)
{
double [][] dsrbg = getDSRBG();
return transformCameraVew(
dsrbg, // double [][] dsi, //
camera_xyz, // double [] camera_xyz, // camera center in world coordinates
camera_atr, // double [] camera_atr, // camera orientation relative to world frame
iscale,
// normally all 0;
null, // double [] wxyz_center, // world camera XYZ (meters) for the frame center
null, // double [] wxyz_center_dt, // world camera Vx, Vy, Vz (m/s)
null, // double [] wxyz_center_d2t, // world camera Vx, Vy, Vz (m/s^2)
null, // double [] watr_center_dt, // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
null); // double [] watr_center_d2t) // camera rotations (az, tilt, roll in radians/s^2, corresponding to the frame center)
}
@Deprecated
public double [][] transformCameraVew(
double [][] dsrbg, //
double [] camera_xyz, // camera center in world coordinates
double [] camera_atr, // camera orientation relative to world frame
final int iscale, // 8
// normally all 0;
double [] wxyz_center, // world camera XYZ (meters) for the frame center
double [] wxyz_center_dt, // world camera Vx, Vy, Vz (m/s)
double [] wxyz_center_d2t, // world camera Vx, Vy, Vz (m/s^2)
double [] watr_center_dt, // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
double [] watr_center_d2t) // camera rotations (az, tilt, roll in radians/s^2, corresponding to the frame center)
{
double line_err = 10.0; // 0.1; // BUG
int tilesX = tp.getTilesX();
int tilesY = tp.getTilesY();
int tiles = tilesX*tilesY;
int transform_size = tp.getTileSize();
int rel_num_passes = 10;
int num_passes = transform_size; // * 2;
double [] zero3 = {0.0,0.0,0.0};
int stilesX = iscale*tilesX;
int stilesY = iscale*tilesY;
int stiles = stilesX*stilesY;
double sigma = 0.5 * iscale;
double scale = 1.0 * iscale/transform_size;
// double reduce = 1.0 / iscale/iscale;
double [][] ds = new double [dsrbg.length][stiles];
for (int i = 0; i <ds.length; i++) {
for (int j = 0; j <ds[i].length; j++) {
ds[i][j] = Double.NaN;
}
}
ErsCorrection ersCorrection = (ErsCorrection) geometryCorrection;
// save original
double [] saved_ers_wxyz_center = ersCorrection.ers_wxyz_center;
double [] saved_ers_wxyz_center_dt = ersCorrection.ers_wxyz_center_dt;
double [] saved_ers_wxyz_center_d2t = ersCorrection.ers_wxyz_center_d2t;
double [] saved_ers_watr_center_dt = ersCorrection.ers_watr_center_dt;
double [] saved_ers_watr_center_d2t = ersCorrection.ers_watr_center_d2t;
if (wxyz_center == null) wxyz_center = ersCorrection.ers_wxyz_center;
if (wxyz_center_dt == null) wxyz_center_dt = ersCorrection.ers_wxyz_center_dt;
if (wxyz_center_d2t == null) wxyz_center_d2t = ersCorrection.ers_wxyz_center_d2t;
if (watr_center_dt == null) watr_center_dt = ersCorrection.ers_watr_center_dt;
if (watr_center_d2t == null) watr_center_d2t = ersCorrection.ers_watr_center_d2t;
ersCorrection.setupERS(
wxyz_center, // double [] wxyz_center, // world camera XYZ (meters) for the frame center
wxyz_center_dt, // double [] wxyz_center_dt, // world camera Vx, Vy, Vz (m/s)
wxyz_center_d2t, // double [] wxyz_center_d2t, // world camera Vx, Vy, Vz (m/s^2)
watr_center_dt, // double [] watr_center_dt, // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
watr_center_d2t); // double [] watr_center_d2t); // camera rotations (az, tilt, roll in radians/s^2, corresponding to the frame center)
double [][] wxyz = new double [tiles][];
for (int tileY = 0; tileY < tilesY; tileY++) {
// int stileY = iscale * tileY + iscale/2;
for (int tileX = 0; tileX < tilesX; tileX++) {
// int stileX = iscale * tileX + iscale/2;
// int stile = stileX + stilesX * stileY;
int nTile = tileX + tileY * tilesX;
double centerX = tileX * transform_size + transform_size/2; // - shiftX;
double centerY = tileY * transform_size + transform_size/2; // - shiftY;
double disparity = dsrbg[DSRBG_DISPARITY][nTile];
if (disparity < 0) {
disparity = 0.0;
}
// found that there are tiles with strength == 0.0, while disparity is not NaN
if (!Double.isNaN(disparity) && (dsrbg[DSRBG_STRENGTH][nTile] > 0.0)) {
wxyz[nTile] = ersCorrection.getWorldCoordinatesERS(
centerX, // double px,
centerY, // double py,
disparity, // double disparity,
true, // boolean correctDistortions,// correct distortion (will need corrected background too !)
camera_xyz, // double [] camera_xyz, // camera center in world coordinates
camera_atr); // double [] camera_atr) // camera orientation relative to world frame
}
}
}
ersCorrection.setupERS(
zero3, // double [] wxyz_center, // world camera XYZ (meters) for the frame center
zero3, // double [] wxyz_center_dt, // world camera Vx, Vy, Vz (m/s)
zero3, // double [] wxyz_center_d2t, // world camera Vx, Vy, Vz (m/s^2)
zero3, // double [] watr_center_dt, // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
zero3); // double [] watr_center_d2t); // camera rotations (az, tilt, roll in radians/s^2, corresponding to the frame center)
double [] zbuffer = new double [tiles];
for (int tileY = 0; tileY < tilesY; tileY++) {
for (int tileX = 0; tileX < tilesX; tileX++) {
int nTile = tileX + tileY * tilesX;
// if (!Double.isNaN(dbg_img[indx_xyz][nTile])) {
if (wxyz[nTile] != null) {
double [] pXpYD = ersCorrection.getImageCoordinatesERS(
wxyz[nTile], // double [] wxyz,
true, // boolean correctDistortions,// correct distortion (will need corrected background too !)
zero3, // double [] camera_xyz, // camera center in world coordinates
zero3, // double [] camera_atr) // camera orientation relative to world frame
line_err); // double line_err) // threshold error in scan lines (1.0)
if (pXpYD != null) {
int px = (int) Math.round(pXpYD[0]/transform_size);
int py = (int) Math.round(pXpYD[1]/transform_size);
int spx = (int) Math.round(pXpYD[0]*scale);
int spy = (int) Math.round(pXpYD[1]*scale);
if ((px >= 0) && (py >= 0) && (px < tilesX) & (py < tilesY)) {
//Z-buffer
if (!(pXpYD[2] < zbuffer[px + py* tilesX])) {
zbuffer[px + py* tilesX] = pXpYD[2];
if ((spx >= 0) && (spy >= 0) && (spx < stilesX) & (spy < stilesY)) {
int sTile = spx + spy* stilesX;
ds[DSRBG_DISPARITY][sTile] = pXpYD[2]; //reduce*
for (int i = DSRBG_STRENGTH; i < dsrbg.length; i++) {
ds[i][sTile] = dsrbg[i][nTile]; // reduce *
}
}
}
}
}
}
}
}
//dsrbg_out[DSRBG_DISPARITY]
for (int i = 0; i < ds.length; i++) {
// int rel_num_passes = 10;
// int num_passes = 2 * transform_size;
// ds[i] = fillNaNGaps(ds[i], num_passes, rel_num_passes, 100); // threadsMax);
/* */
ds[i] = (new DoubleGaussianBlur()).blurWithNaN(
ds[i], // double[] pixels,
null, // double [] in_weight, // or null
stilesX, // int width,
stilesY, // int height,
sigma, // double sigmaX,
sigma, // double sigmaY,
0.01); // double accuracy);
/* */
}
double [][] dsrbg_out = new double [dsrbg.length][tiles];
int [][] num_non_nan = new int [dsrbg_out.length] [tiles];
for (int stileY = 0; stileY < stilesY; stileY++) {
int tileY = stileY / iscale;
for (int stileX = 0; stileX < stilesX; stileX++) {
int tileX = stileX / iscale;
int stile = stileX + stileY * stilesX;
int tile = tileX + tileY * tilesX;
for (int i = 0; i < dsrbg_out.length; i++) {
double d = ds[i][stile];
if (!Double.isNaN(d)) {
num_non_nan[i][tile] ++;
dsrbg_out[i][tile] += d;
}
}
}
}
for (int i = 0; i < dsrbg_out.length; i++) {
for (int j = 0; j < tiles; j++) {
if (num_non_nan[i][j] == 0) {
dsrbg_out[i][j] = Double.NaN;
} else {
dsrbg_out[i][j]/=num_non_nan[i][j];
}
}
}
if (num_passes > 0) {
for (int i = 0; i < dsrbg_out.length; i++) {
dsrbg_out[i] = fillNaNGaps(dsrbg_out[i], num_passes, rel_num_passes, 100); // threadsMax);
}
}
// restore original
ersCorrection.setupERS(
saved_ers_wxyz_center, // double [] wxyz_center, // world camera XYZ (meters) for the frame center
saved_ers_wxyz_center_dt, // double [] wxyz_center_dt, // world camera Vx, Vy, Vz (m/s)
saved_ers_wxyz_center_d2t, // double [] wxyz_center_d2t, // world camera Vx, Vy, Vz (m/s^2)
saved_ers_watr_center_dt, // double [] watr_center_dt, // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
saved_ers_watr_center_d2t); // double [] watr_center_d2t); // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
return dsrbg_out;
}
@Deprecated
public double[][] test_back_forth_debug(
double k_prev,
QuadCLT qprev,
double corr_scale, // = 0.75
int debug_level)
{
double ts = getTimeStamp();
double ts_prev = ts;
boolean force_manual = false;
double [] zero3 = {0.0,0.0,0.0};
double [] camera_xyz0 = {0.0,0.0,0.0};
double [] camera_atr0 = {0.0,0.0,0.0};
double [] watr_center_dt = {0.0, 0.0, 0.0};
double [] watr_center_d2t = {0.0, 0.0, 0.0};
double [] wxyz_center = {0.0, 0.0, 0.0};
double [] wxyz_center_dt = {0.0, 0.0, 0.0};
double [] wxyz_center_d2t = {0.0, 0.0, 0.0};
ErsCorrection ersCorrection = (ErsCorrection) geometryCorrection;
// save original
double [] saved_ers_wxyz_center = ersCorrection.ers_wxyz_center;
double [] saved_ers_wxyz_center_dt = ersCorrection.ers_wxyz_center_dt;
double [] saved_ers_wxyz_center_d2t = ersCorrection.ers_wxyz_center_d2t;
double [] saved_ers_watr_center_dt = ersCorrection.ers_watr_center_dt;
double [] saved_ers_watr_center_d2t = ersCorrection.ers_watr_center_d2t;
System.out.println("\n"+image_name+":\n"+ersCorrection.extrinsic_corr.toString());
System.out.println(String.format("%s: ers_wxyz_center= %f, %f, %f", image_name,saved_ers_wxyz_center[0], saved_ers_wxyz_center[1],saved_ers_wxyz_center[2] ));
System.out.println(String.format("%s: ers_wxyz_center_dt= %f, %f, %f", image_name,saved_ers_wxyz_center_dt[0], saved_ers_wxyz_center_dt[1],saved_ers_wxyz_center_dt[2] ));
System.out.println(String.format("%s: ers_wxyz_center_d2t= %f, %f, %f", image_name,saved_ers_wxyz_center_d2t[0], saved_ers_wxyz_center_d2t[1],saved_ers_wxyz_center_d2t[2] ));
System.out.println(String.format("%s: ers_watr_center_dt= %f, %f, %f", image_name,saved_ers_watr_center_dt[0], saved_ers_watr_center_dt[1],saved_ers_watr_center_dt[2] ));
System.out.println(String.format("%s: ers_watr_center_d2t= %f, %f, %f", image_name,saved_ers_watr_center_d2t[0], saved_ers_watr_center_d2t[1],saved_ers_watr_center_d2t[2] ));
// Comment out to use manual values
if (force_manual) {
System.out.println("\nUsing manually set values:");
System.out.println(String.format("%s: wxyz_center= %f, %f, %f", image_name,wxyz_center[0], wxyz_center[1],wxyz_center[2] ));
System.out.println(String.format("%s: wxyz_center_dt= %f, %f, %f", image_name,wxyz_center_dt[0], wxyz_center_dt[1],wxyz_center_dt[2] ));
System.out.println(String.format("%s: wxyz_center_d2t= %f, %f, %f", image_name,wxyz_center_d2t[0], wxyz_center_d2t[1],wxyz_center_d2t[2] ));
System.out.println(String.format("%s: watr_center_dt= %f, %f, %f", image_name,watr_center_dt[0], watr_center_dt[1],watr_center_dt[2] ));
System.out.println(String.format("%s: watr_center_d2t= %f, %f, %f", image_name,watr_center_d2t[0], watr_center_d2t[1],watr_center_d2t[2] ));
}else {
wxyz_center = saved_ers_wxyz_center;
wxyz_center_dt = saved_ers_wxyz_center_dt;
wxyz_center_d2t = saved_ers_wxyz_center_d2t; // double [] wxyz_center_d2t, // world camera Vx, Vy, Vz (m/s^2)
watr_center_dt = saved_ers_watr_center_dt;
watr_center_d2t = saved_ers_watr_center_d2t;
}
double dt = 0.0;
if (qprev == null) {
qprev = this;
}
if (qprev != null) {
ts_prev = qprev.getTimeStamp();
dt = ts-ts_prev;
if (dt < 0) {
k_prev = (1.0-k_prev);
}
if (Math.abs(dt) > 0.15) { // at least two frames TODO: use number of lines* line_time * ...?
k_prev = 0.5;
System.out.println("Non-consecutive frames, dt = "+dt);
}
ErsCorrection ersCorrectionPrev = (ErsCorrection) (qprev.geometryCorrection);
double [] wxyz_center_dt_prev = ersCorrectionPrev.ers_wxyz_center_dt;
double [] watr_center_dt_prev = ersCorrectionPrev.ers_watr_center_dt;
double [] wxyz_delta = new double[3];
double [] watr_delta = new double[3];
for (int i = 0; i <3; i++) {
wxyz_delta[i] = - corr_scale * dt * (k_prev * wxyz_center_dt_prev[i] + (1.0-k_prev) * saved_ers_wxyz_center_dt[i]);
watr_delta[i] = - corr_scale * dt * (k_prev * watr_center_dt_prev[i] + (1.0-k_prev) * saved_ers_watr_center_dt[i]);
}
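// The applied pose correction is a backward extrapolation over the inter-frame interval:
// delta = -corr_scale * dt * (k_prev * v_prev + (1 - k_prev) * v_this), blending the ERS velocities of
// the previous and the current scene. Illustrative numbers only (not from the data): with dt = 0.1 s,
// corr_scale = 0.75 and k_prev = 0.5 the delta equals -0.075 times the mean of the two velocities.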
camera_xyz0 = wxyz_delta;
camera_atr0 = watr_delta;
}
int tilesX = tp.getTilesX();
int tilesY = tp.getTilesY();
String [] titles = {"s0", "s1", "d0", "d1", "pX0","pX", "pY0","pY", "x","y","z","w"};
double [][] dsi = {this.dsi[TwoQuadCLT.DSI_DISPARITY_MAIN], this.dsi[TwoQuadCLT.DSI_STRENGTH_MAIN]};
double [][] dbg_img0 = new double [titles.length][]; // null; // previous camera view, ers only
double [][] dbg_img1 = new double [titles.length][]; // null; // this camera view, ers only
if (!force_manual) {
dbg_img0 = qprev.transformCameraVewDebug( // previous camera view, ers only
zero3, // double [] camera_xyz, // camera center in world coordinates
zero3); // double [] camera_atr, // camera orientation relative to world frame
dbg_img1 = transformCameraVewDebug( // this camera view, ers only
zero3, // double [] camera_xyz, // camera center in world coordinates
zero3); // double [] camera_atr, // camera orientation relative to world frame
if (debug_level > 1) {
String title0 = String.format("%s_ers_only",qprev.image_name); // previous frame, ERS-corrected only, no shift/rot
(new ShowDoubleFloatArrays()).showArrays(
dbg_img0,
tilesX,
tilesY,
true,
title0,
titles);
}
}
double [][] dbg_img = transformCameraVewDebug(
dsi, //
camera_xyz0, // double [] camera_xyz, // camera center in world coordinates
camera_atr0, //double [] camera_atr, // camera orientation relative to world frame
wxyz_center, // double [] wxyz_center, // world camera XYZ (meters) for the frame center
wxyz_center_dt, // double [] wxyz_center_dt, // world camera Vx, Vy, Vz (m/s)
wxyz_center_d2t, // double [] wxyz_center_d2t, // world camera Vx, Vy, Vz (m/s^2)
watr_center_dt, // double [] watr_center_dt, // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
watr_center_d2t); // double [] watr_center_d2t); // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
if (debug_level > 1) {
String title = String.format("%s_%f:%f:%f_%f:%f:%f",image_name,
camera_xyz0[0],camera_xyz0[1],camera_xyz0[2],camera_atr0[0],camera_atr0[1],camera_atr0[2]);
if (!force_manual) {
title += String.format ("_ers_%f:%f:%f_%f:%f:%f",wxyz_center_dt[0],wxyz_center_dt[1],wxyz_center_dt[2],
watr_center_dt[0],watr_center_dt[1],watr_center_dt[2]);
}
(new ShowDoubleFloatArrays()).showArrays(
dbg_img,
tilesX,
tilesY,
true,
title,
titles);
}
int indx_s0 = 0;
int indx_s1 = 1;
int indx_d0 = 2;
int indx_d1 = 3;
// combine previous frame with this one
String [] rtitles = {"s","s_ers", "s_prev","s_this",
"d","d_ers", "d_prev","d_this"};
double [][] rslt = {
dbg_img1[indx_s0], // this strength as is
dbg_img1[indx_s1], // this strength - ers only
dbg_img0[indx_s1], // previous strength - ers only to match to
dbg_img [indx_s1], // this strength - ers and shift/rot
dbg_img1[indx_d0], // this disparity as is
dbg_img1[indx_d1], // this disparity - ers only
dbg_img0[indx_d1], // previous disparity - ers only to match to
dbg_img [indx_d1]};// this disparity - ers and shift/rot
if (debug_level > 0) {
String title = image_name+"-"+qprev.image_name+"-dt"+dt;
(new ShowDoubleFloatArrays()).showArrays(
rslt,
tilesX,
tilesY,
true,
title,
rtitles);
}
return rslt;
}
@Deprecated
public double [][] transformCameraVewDebug(
double [] camera_xyz, // camera center in world coordinates
double [] camera_atr) // camera orientation relative to world frame
{
double [][] dsi = {this.dsi[TwoQuadCLT.DSI_DISPARITY_MAIN], this.dsi[TwoQuadCLT.DSI_STRENGTH_MAIN]};
return transformCameraVewDebug(
dsi, //
camera_xyz, // camera center in world coordinates
camera_atr, // camera orientation relative to world frame
// normally all 0;
null, // double [] wxyz_center, // world camera XYZ (meters) for the frame center
null, // double [] wxyz_center_dt, // world camera Vx, Vy, Vz (m/s)
null, // double [] wxyz_center_d2t, // world camera Vx, Vy, Vz (m/s^2)
null, // double [] watr_center_dt, // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
null); // double [] watr_center_d2t) // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
}
@Deprecated
public double [][] transformCameraVewDebug(
double [][] dsi, //
double [] camera_xyz, // camera center in world coordinates
double [] camera_atr, // camera orientation relative to world frame
// normally all 0;
double [] wxyz_center, // world camera XYZ (meters) for the frame center
double [] wxyz_center_dt, // world camera Vx, Vy, Vz (m/s)
double [] wxyz_center_d2t, // world camera Vx, Vy, Vz (m/s^2)
double [] watr_center_dt, // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
double [] watr_center_d2t) // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
{
double line_err = 10.0; // 0.1; // bug
int tilesX = tp.getTilesX();
int tilesY = tp.getTilesY();
int tiles = tilesX*tilesY;
int transform_size = tp.getTileSize();
// double [] ers_watr_center_dt = {0.168792, 0.037145, -0.060279};
double [] zero3 = {0.0,0.0,0.0};
int indx_s0 = 0;
int indx_s1 = 1;
int indx_d0 = 2;
int indx_d1 = 3;
/// int indx_pX0 = 4;
int indx_pX = 5;
/// int indx_pY0 = 6;
int indx_pY = 7;
int indx_xyz = 8;
double [][] dbg_img = new double [12][tiles];
for (int i = 0; i <dbg_img.length; i++) {
for (int j = 0; j <dbg_img[i].length; j++) {
dbg_img[i][j] = Double.NaN;
}
}
final int iscale = 8;
int stilesX = iscale*tilesX;
int stilesY = iscale*tilesY;
int stiles = stilesX*stilesY;
double sigma = 0.5 * iscale;
double scale = 1.0*iscale/transform_size;
double reduce = 1.0/iscale/iscale;
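// Super-sampling constants: every tile is split into iscale x iscale sub-tiles (an stilesX x stilesY grid).
// 'scale' converts pixel coordinates to sub-tile indices; assuming the usual transform_size of 8
// (tp.getTileSize() decides) and iscale = 8, it equals 1.0, i.e. one sub-tile per pixel.
// 'reduce' = 1/(iscale*iscale) pre-scales the seeded values so that the later per-tile summation of all
// iscale*iscale sub-tiles approximately restores the original magnitude after the NaN-aware blur.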
double [][] ds = new double [2][stiles];
double [][] ds0 = new double [2][stiles];
for (int i = 0; i <ds.length; i++) {
for (int j = 0; j <ds[i].length; j++) {
ds[i][j] = Double.NaN;
ds0[i][j] = Double.NaN;
}
}
dbg_img [indx_d0] = dsi[0];
dbg_img [indx_s0] = dsi[1];
ErsCorrection ersCorrection = (ErsCorrection) geometryCorrection;
// save original
double [] saved_ers_wxyz_center = ersCorrection.ers_wxyz_center;
double [] saved_ers_wxyz_center_dt = ersCorrection.ers_wxyz_center_dt;
double [] saved_ers_wxyz_center_d2t = ersCorrection.ers_wxyz_center_d2t;
double [] saved_ers_watr_center_dt = ersCorrection.ers_watr_center_dt;
double [] saved_ers_watr_center_d2t = ersCorrection.ers_watr_center_d2t;
if (wxyz_center == null) wxyz_center = ersCorrection.ers_wxyz_center;
if (wxyz_center_dt == null) wxyz_center_dt = ersCorrection.ers_wxyz_center_dt;
if (wxyz_center_d2t == null) wxyz_center_d2t = ersCorrection.ers_wxyz_center_d2t;
if (watr_center_dt == null) watr_center_dt = ersCorrection.ers_watr_center_dt;
if (watr_center_d2t == null) watr_center_d2t = ersCorrection.ers_watr_center_d2t;
ersCorrection.setupERS(
wxyz_center, // double [] wxyz_center, // world camera XYZ (meters) for the frame center
wxyz_center_dt, // double [] wxyz_center_dt, // world camera Vx, Vy, Vz (m/s)
wxyz_center_d2t, // double [] wxyz_center_d2t, // world camera Vx, Vy, Vz (m/s^2)
watr_center_dt, // double [] watr_center_dt, // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
watr_center_d2t); // double [] watr_center_d2t); // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
for (int tileY = 0; tileY < tilesY; tileY++) {
int stileY = iscale * tileY + iscale/2;
for (int tileX = 0; tileX < tilesX; tileX++) {
int stileX = iscale * tileX + iscale/2;
int stile = stileX + stilesX * stileY;
int nTile = tileX + tileY * tilesX;
double centerX = tileX * transform_size + transform_size/2; // - shiftX;
double centerY = tileY * transform_size + transform_size/2; // - shiftY;
ds0[0][stile] = reduce* dbg_img [indx_d0][nTile]; // this.dsi[TwoQuadCLT.DSI_DISPARITY_MAIN][nTile];
ds0[1][stile] = reduce* dbg_img [indx_s0][nTile]; // this.dsi[TwoQuadCLT.DSI_STRENGTH_MAIN][nTile];
double [] wxyz = ersCorrection.getWorldCoordinatesERS(
centerX, // double px,
centerY, // double py,
dsi[0][nTile], // double disparity,
true, // boolean correctDistortions,// correct distortion (will need corrected background too !)
camera_xyz, // double [] camera_xyz, // camera center in world coordinates
camera_atr); // double [] camera_atr) // camera orientation relative to world frame
if ((wxyz != null) && (wxyz[2] < 0.0)) {
for (int i = 0; i < 4; i ++) {
dbg_img[indx_xyz+i][nTile] = wxyz[i];
}
}
}
}
ersCorrection.setupERS(
zero3, // double [] wxyz_center, // world camera XYZ (meters) for the frame center
zero3, // double [] wxyz_center_dt, // world camera Vx, Vy, Vz (m/s)
zero3, // double [] wxyz_center_d2t, // world camera Vx, Vy, Vz (m/s^2)
zero3, // double [] watr_center_dt, // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
zero3); // double [] watr_center_d2t); // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
for (int tileY = 0; tileY < tilesY; tileY++) {
for (int tileX = 0; tileX < tilesX; tileX++) {
int nTile = tileX + tileY * tilesX;
if (!Double.isNaN(dbg_img[indx_xyz][nTile])) {
double [] wxyz = {
dbg_img[indx_xyz][nTile],
dbg_img[indx_xyz+1][nTile],
dbg_img[indx_xyz+2][nTile],
dbg_img[indx_xyz+3][nTile]};
double [] pXpYD = ersCorrection.getImageCoordinatesERS(
wxyz, // double [] wxyz,
true, // boolean correctDistortions,// correct distortion (will need corrected background too !)
zero3, // double [] camera_xyz, // camera center in world coordinates
zero3, // double [] camera_atr) // camera orientation relative to world frame
line_err); // double line_err) // threshold error in scan lines (1.0)
if (pXpYD != null) {
dbg_img[indx_pX][nTile] = pXpYD[0];
dbg_img[indx_pY][nTile] = pXpYD[1];
int px = (int) Math.round(pXpYD[0]/transform_size);
int py = (int) Math.round(pXpYD[1]/transform_size);
int spx = (int) Math.round(pXpYD[0]*scale);
int spy = (int) Math.round(pXpYD[1]*scale);
if ((px >= 0) && (py >= 0) && (px < tilesX) && (py < tilesY)) {
if (!(pXpYD[2] < dbg_img[indx_d1][px + py* tilesX])) {
dbg_img[indx_s1][px + py* tilesX] = dbg_img[indx_s0][nTile];
dbg_img[indx_d1][px + py* tilesX] = pXpYD[2];
if ((spx >= 0) && (spy >= 0) && (spx < stilesX) && (spy < stilesY)) {
ds[1][spx + spy* stilesX] = reduce*dbg_img[indx_s0][nTile];
ds[0][spx + spy* stilesX] = reduce*pXpYD[2];
}
}
}
}
}
}
}
ds[0] = (new DoubleGaussianBlur()).blurWithNaN(
ds[0], // double[] pixels,
null, // double [] in_weight, // or null
stilesX, // int width,
stilesY, // int height,
sigma, // double sigmaX,
sigma, // double sigmaY,
0.01); // double accuracy);
ds[1] = (new DoubleGaussianBlur()).blurWithNaN(
ds[1], // double[] pixels,
null, // double [] in_weight, // or null
stilesX, // int width,
stilesY, // int height,
sigma, // double sigmaX,
sigma, // double sigmaY,
0.01); // double accuracy);
ds0[0] = (new DoubleGaussianBlur()).blurWithNaN(
ds0[0], // double[] pixels,
null, // double [] in_weight, // or null
stilesX, // int width,
stilesY, // int height,
sigma, // double sigmaX,
sigma, // double sigmaY,
0.01); // double accuracy);
ds0[1] = (new DoubleGaussianBlur()).blurWithNaN(
ds0[1], // double[] pixels,
null, // double [] in_weight, // or null
stilesX, // int width,
stilesY, // int height,
sigma, // double sigmaX,
sigma, // double sigmaY,
0.01); // double accuracy);
dbg_img[indx_s1] = new double[tiles];
dbg_img[indx_d1] = new double[tiles];
dbg_img[indx_s0] = new double[tiles];
dbg_img[indx_d0] = new double[tiles];
for (int stileY = 0; stileY < stilesY; stileY++) {
int tileY = stileY / iscale;
for (int stileX = 0; stileX < stilesX; stileX++) {
int tileX = stileX / iscale;
int stile = stileX + stileY * stilesX;
int tile = tileX + tileY * tilesX;
dbg_img[indx_s1][tile] += ds[1][stile];
dbg_img[indx_d1][tile] += ds[0][stile];
dbg_img[indx_s0][tile] += ds0[1][stile];
dbg_img[indx_d0][tile] += ds0[0][stile];
}
}
// restore original
ersCorrection.setupERS(
saved_ers_wxyz_center, // double [] wxyz_center, // world camera XYZ (meters) for the frame center
saved_ers_wxyz_center_dt, // double [] wxyz_center_dt, // world camera Vx, Vy, Vz (m/s)
saved_ers_wxyz_center_d2t, // double [] wxyz_center_d2t, // world camera Vx, Vy, Vz (m/s^2)
saved_ers_watr_center_dt, // double [] watr_center_dt, // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
saved_ers_watr_center_d2t); // double [] watr_center_d2t); // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
return dbg_img;
}
@Deprecated
public double[][] test_back_forth0(
double [] camera_xyz,
double [] camera_atr)
{
camera_atr[0] = 1.0;
double [] camera_xyz0 = {0.0,0.0,0.0};
double [] camera_atr0 = {0.0,0.0,0.0};
double [] camera_xyz1 = {0.0,0.0,0.0};
double [] camera_atr1 = {0.0,0.0,0.0};
double line_err = 10.0; // 0.1; // bug
int tilesX = tp.getTilesX();
int tilesY = tp.getTilesY();
int tiles = tilesX*tilesY;
int transform_size = tp.getTileSize();
String [] titles = {"s0", "s1", "d0", "d1", "pX0","pX", "pY0","pY", "x","y","z","w"};
// double [] ers_watr_center_dt = {0.168792, 0.037145, -0.060279};
double [] zero3 = {0.0,0.0,0.0};
// double [] watr_center_dt = {0.168792, 0.037145, -0.060279};
double [] watr_center_dt = {0.0, 0.0, 0.0};
double [] watr_center_d2t = {0.0, 0.0, 0.0};
double [] wxyz_center = {0.0, 0.0, 0.0};
double [] wxyz_center_dt = {0.0, 0.0, 0.0};
double [] wxyz_center_d2t = {0.0, 0.0, 0.0};
boolean force_manual = false; // true;
int indx_s0 = 0;
int indx_s1 = 1;
int indx_d0 = 2;
int indx_d1 = 3;
int indx_pX0 = 4;
int indx_pX = 5;
int indx_pY0 = 6;
int indx_pY = 7;
int indx_xyz = 8;
double [][] dbg_img = new double [titles.length][tiles];
for (int i = 0; i <dbg_img.length; i++) {
for (int j = 0; j <dbg_img[i].length; j++) {
dbg_img[i][j] = Double.NaN;
}
}
final int iscale = 8;
int stilesX = iscale*tilesX;
int stilesY = iscale*tilesY;
int stiles = stilesX*stilesY;
double sigma = 0.5 * iscale;
double scale = 1.0*iscale/transform_size;
double reduce = 1.0/iscale/iscale;
double [][] ds = new double [2][stiles];
double [][] ds0 = new double [2][stiles];
for (int i = 0; i <ds.length; i++) {
for (int j = 0; j <ds[i].length; j++) {
ds[i][j] = Double.NaN;
ds0[i][j] = Double.NaN;
}
}
dbg_img [indx_d0] = this.dsi[TwoQuadCLT.DSI_DISPARITY_MAIN];
dbg_img [indx_s0] = this.dsi[TwoQuadCLT.DSI_STRENGTH_MAIN];
ErsCorrection ersCorrection = (ErsCorrection) geometryCorrection;
// save original
double [] saved_ers_wxyz_center = ersCorrection.ers_wxyz_center;
double [] saved_ers_wxyz_center_dt = ersCorrection.ers_wxyz_center_dt;
double [] saved_ers_wxyz_center_d2t = ersCorrection.ers_wxyz_center_d2t;
double [] saved_ers_watr_center_dt = ersCorrection.ers_watr_center_dt;
double [] saved_ers_watr_center_d2t = ersCorrection.ers_watr_center_d2t;
System.out.println("\n"+image_name+":\n"+ersCorrection.extrinsic_corr.toString());
System.out.println(String.format("%s: ers_wxyz_center= %f, %f, %f", image_name,saved_ers_wxyz_center[0], saved_ers_wxyz_center[1],saved_ers_wxyz_center[2] ));
System.out.println(String.format("%s: ers_wxyz_center_dt= %f, %f, %f", image_name,saved_ers_wxyz_center_dt[0], saved_ers_wxyz_center_dt[1],saved_ers_wxyz_center_dt[2] ));
System.out.println(String.format("%s: ers_wxyz_center_d2t= %f, %f, %f", image_name,saved_ers_wxyz_center_d2t[0], saved_ers_wxyz_center_d2t[1],saved_ers_wxyz_center_d2t[2] ));
System.out.println(String.format("%s: ers_watr_center_dt= %f, %f, %f", image_name,saved_ers_watr_center_dt[0], saved_ers_watr_center_dt[1],saved_ers_watr_center_dt[2] ));
System.out.println(String.format("%s: ers_watr_center_d2t= %f, %f, %f", image_name,saved_ers_watr_center_d2t[0], saved_ers_watr_center_d2t[1],saved_ers_watr_center_d2t[2] ));
// Comment out to use manual values
if (force_manual) {
System.out.println("\nUsing manually set values:");
System.out.println(String.format("%s: wxyz_center= %f, %f, %f", image_name,wxyz_center[0], wxyz_center[1],wxyz_center[2] ));
System.out.println(String.format("%s: wxyz_center_dt= %f, %f, %f", image_name,wxyz_center_dt[0], wxyz_center_dt[1],wxyz_center_dt[2] ));
System.out.println(String.format("%s: wxyz_center_d2t= %f, %f, %f", image_name,wxyz_center_d2t[0], wxyz_center_d2t[1],wxyz_center_d2t[2] ));
System.out.println(String.format("%s: watr_center_dt= %f, %f, %f", image_name,watr_center_dt[0], watr_center_dt[1],watr_center_dt[2] ));
System.out.println(String.format("%s: watr_center_d2t= %f, %f, %f", image_name,watr_center_d2t[0], watr_center_d2t[1],watr_center_d2t[2] ));
}else {
wxyz_center = saved_ers_wxyz_center;
wxyz_center_dt = saved_ers_wxyz_center_dt;
wxyz_center_d2t = saved_ers_wxyz_center_d2t; // double [] wxyz_center_d2t, // world camera Vx, Vy, Vz (m/s^2)
watr_center_dt = saved_ers_watr_center_dt;
watr_center_d2t = saved_ers_watr_center_d2t;
}
ersCorrection.setupERS(
zero3, // double [] wxyz_center, // world camera XYZ (meters) for the frame center
wxyz_center_dt, // double [] wxyz_center_dt, // world camera Vx, Vy, Vz (m/s)
wxyz_center_d2t, // double [] wxyz_center_d2t, // world camera Vx, Vy, Vz (m/s^2)
watr_center_dt, // double [] watr_center_dt, // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
watr_center_d2t); // double [] watr_center_d2t); // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
for (int tileY = 0; tileY < tilesY; tileY++) {
int stileY = iscale * tileY + iscale/2;
for (int tileX = 0; tileX < tilesX; tileX++) {
int stileX = iscale * tileX + iscale/2;
int stile = stileX + stilesX * stileY;
int nTile = tileX + tileY * tilesX;
double centerX = tileX * transform_size + transform_size/2; // - shiftX;
double centerY = tileY * transform_size + transform_size/2; // - shiftY;
ds0[0][stile] = reduce* dbg_img [indx_d0][nTile]; // this.dsi[TwoQuadCLT.DSI_DISPARITY_MAIN][nTile];
ds0[1][stile] = reduce* dbg_img [indx_s0][nTile]; // this.dsi[TwoQuadCLT.DSI_STRENGTH_MAIN][nTile];
dbg_img[indx_pX0][nTile] = centerX;
dbg_img[indx_pY0][nTile] = centerY;
double [] wxyz = ersCorrection.getWorldCoordinatesERS(
centerX, // double px,
centerY, // double py,
this.dsi[TwoQuadCLT.DSI_DISPARITY_MAIN][nTile], // double disparity,
true, // boolean correctDistortions,// correct distortion (will need corrected background too !)
camera_xyz0, // double [] camera_xyz, // camera center in world coordinates
camera_atr0); // double [] camera_atr) // camera orientation relative to world frame
if ((wxyz != null) && (wxyz[2] < 0.0)) {
for (int i = 0; i < 4; i ++) {
dbg_img[indx_xyz+i][nTile] = wxyz[i];
}
}
}
}
ersCorrection.setupERS(
zero3, // double [] wxyz_center, // world camera XYZ (meters) for the frame center
zero3, // double [] wxyz_center_dt, // world camera Vx, Vy, Vz (m/s)
zero3, // double [] wxyz_center_d2t, // world camera Vx, Vy, Vz (m/s^2)
zero3, // double [] watr_center_dt, // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
zero3); // double [] watr_center_d2t); // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
for (int tileY = 0; tileY < tilesY; tileY++) {
for (int tileX = 0; tileX < tilesX; tileX++) {
int nTile = tileX + tileY * tilesX;
if (!Double.isNaN(dbg_img[indx_xyz][nTile])) {
double [] wxyz = {
dbg_img[indx_xyz][nTile],
dbg_img[indx_xyz+1][nTile],
dbg_img[indx_xyz+2][nTile],
dbg_img[indx_xyz+3][nTile]};
double [] pXpYD = ersCorrection.getImageCoordinatesERS(
wxyz, // double [] wxyz,
true, // boolean correctDistortions,// correct distortion (will need corrected background too !)
camera_xyz1, // double [] camera_xyz, // camera center in world coordinates
camera_atr1, // double [] camera_atr) // camera orientation relative to world frame
line_err); // double line_err) // threshold error in scan lines (1.0)
if (pXpYD != null) {
dbg_img[indx_pX][nTile] = pXpYD[0];
dbg_img[indx_pY][nTile] = pXpYD[1];
int px = (int) Math.round(pXpYD[0]/transform_size);
int py = (int) Math.round(pXpYD[1]/transform_size);
int spx = (int) Math.round(pXpYD[0]*scale);
int spy = (int) Math.round(pXpYD[1]*scale);
if ((px >= 0) && (py >= 0) && (px < tilesX) && (py < tilesY)) {
if (!(pXpYD[2] < dbg_img[indx_d1][px + py* tilesX])) {
dbg_img[indx_s1][px + py* tilesX] = dbg_img[indx_s0][nTile];
dbg_img[indx_d1][px + py* tilesX] = pXpYD[2];
if ((spx >= 0) && (spy >= 0) && (spx < stilesX) && (spy < stilesY)) {
ds[1][spx + spy* stilesX] = reduce*dbg_img[indx_s0][nTile];
ds[0][spx + spy* stilesX] = reduce*pXpYD[2];
}
}
}
}
}
}
}
ds[0] = (new DoubleGaussianBlur()).blurWithNaN(
ds[0], // double[] pixels,
null, // double [] in_weight, // or null
stilesX, // int width,
stilesY, // int height,
sigma, // double sigmaX,
sigma, // double sigmaY,
0.01); // double accuracy);
ds[1] = (new DoubleGaussianBlur()).blurWithNaN(
ds[1], // double[] pixels,
null, // double [] in_weight, // or null
stilesX, // int width,
stilesY, // int height,
sigma, // double sigmaX,
sigma, // double sigmaY,
0.01); // double accuracy);
ds0[0] = (new DoubleGaussianBlur()).blurWithNaN(
ds0[0], // double[] pixels,
null, // double [] in_weight, // or null
stilesX, // int width,
stilesY, // int height,
sigma, // double sigmaX,
sigma, // double sigmaY,
0.01); // double accuracy);
ds0[1] = (new DoubleGaussianBlur()).blurWithNaN(
ds0[1], // double[] pixels,
null, // double [] in_weight, // or null
stilesX, // int width,
stilesY, // int height,
sigma, // double sigmaX,
sigma, // double sigmaY,
0.01); // double accuracy);
dbg_img[indx_s1] = new double[tiles];
dbg_img[indx_d1] = new double[tiles];
dbg_img[indx_s0] = new double[tiles];
dbg_img[indx_d0] = new double[tiles];
for (int stileY = 0; stileY < stilesY; stileY++) {
int tileY = stileY / iscale;
for (int stileX = 0; stileX < stilesX; stileX++) {
int tileX = stileX / iscale;
int stile = stileX + stileY * stilesX;
int tile = tileX + tileY * tilesX;
dbg_img[indx_s1][tile] += ds[1][stile];
dbg_img[indx_d1][tile] += ds[0][stile];
dbg_img[indx_s0][tile] += ds0[1][stile];
dbg_img[indx_d0][tile] += ds0[0][stile];
}
}
String title = String.format("%s_%f:%f:%f_%f:%f:%f",image_name,
camera_xyz0[0],camera_xyz0[1],camera_xyz0[2],camera_atr0[0],camera_atr0[1],camera_atr0[2]);
title += String.format ("_ers_%f:%f:%f_%f:%f:%f",wxyz_center_dt[0],wxyz_center_dt[1],wxyz_center_dt[2],
watr_center_dt[0],watr_center_dt[1],watr_center_dt[2]);
(new ShowDoubleFloatArrays()).showArrays(
dbg_img,
tilesX,
tilesY,
true,
title,
titles);
// restore original
ersCorrection.setupERS(
saved_ers_wxyz_center, // double [] wxyz_center, // world camera XYZ (meters) for the frame center
saved_ers_wxyz_center_dt, // double [] wxyz_center_dt, // world camera Vx, Vy, Vz (m/s)
saved_ers_wxyz_center_d2t, // double [] wxyz_center_d2t, // world camera Vx, Vy, Vz (m/s^2)
saved_ers_watr_center_dt, // double [] watr_center_dt, // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
saved_ers_watr_center_d2t); // double [] watr_center_d2t); // camera rotations (az, tilt, roll in radians/s, corresponding to the frame center)
return dbg_img;
}
public void setGPU(GPUTileProcessor.GpuQuad gpuQuad) {
this.gpuQuad = gpuQuad;
}
......@@ -1783,15 +3214,20 @@ public class QuadCLT extends QuadCLTCPU {
public void setPassAvgRBGA( // get image from a single pass, return relative path for x3d // USED in lwir
CLTParameters clt_parameters,
int scanIndex,
// int scanIndex,
CLTPass3d scan,
int threadsMax, // maximal number of threads to launch
boolean updateStatus,
int debugLevel)
{
if ((gpuQuad != null) && (isAux()?clt_parameters.gpu_use_aux : clt_parameters.gpu_use_main)) {
final int tilesX = tp.getTilesX();
final int tilesY = tp.getTilesY();
CLTPass3d scan = tp.clt_3d_passes.get(scanIndex);
// CLTPass3d scan = tp.clt_3d_passes.get(scanIndex);
// final int tilesX = tp.getTilesX();
// final int tilesY = tp.getTilesY();
final int tilesX = scan.getTileProcessor().getTilesX();
final int tilesY = scan.getTileProcessor().getTilesY();
double [] disparity = scan.getDisparity();
double [] strength = scan.getStrength();
boolean [] selection = null; // scan.getSelected();
......@@ -1822,8 +3258,8 @@ public class QuadCLT extends QuadCLTCPU {
}
super.setPassAvgRBGA( // get image from a single pass, return relative path for x3d // USED in lwir
clt_parameters,
scanIndex,
threadsMax, // maximal number of threads to launch
scan, // scanIndex,
threadsMax, // maximal number of threads to launch
updateStatus,
debugLevel);
}
......@@ -2321,7 +3757,7 @@ public class QuadCLT extends QuadCLTCPU {
float [][][][] fpxpy = new float[tilesY][tilesX][][];
final double gpu_fat_zero = clt_parameters.getGpuFatZero(isMonochrome());
image_dtt.clt_aberrations_quad_corr_GPU( // USED in LWIR
image_dtt.clt_aberrations_quad_corr_GPU(
clt_parameters.img_dtt, // final ImageDttParameters imgdtt_params, // Now just extra correlation parameters, later will include, most others
1, // final int macro_scale, // to correlate tile data instead of the pixel data: 1 - pixels, 8 - tiles
tile_op, // per-tile operation bit codes
......
......@@ -27,7 +27,12 @@ package com.elphel.imagej.tileprocessor;
//import java.awt.Polygon;
import java.awt.Rectangle;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
......@@ -37,6 +42,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Enumeration;
import java.util.List;
import java.util.Properties;
import java.util.Set;
......@@ -84,6 +90,13 @@ public class QuadCLTCPU {
public static final int FGBG_AUX_DISP = 8; // AUX calculated disparity
public static final int FGBG_AUX_STR = 9; // AUX calculated strength
public static final int DSRBG_DISPARITY = 0;
public static final int DSRBG_STRENGTH = 1;
public static final int DSRBG_RED = 2;
public static final int DSRBG_BLUE = 3;
public static final int DSRBG_GREEN = 4;
// public GPUTileProcessor.GpuQuad gpuQuad = null;
static String [] fine_corr_coeff_names = {"A","B","C","D","E","F"};
......@@ -91,8 +104,8 @@ public class QuadCLTCPU {
public static String PREFIX = "EYESIS_DCT."; // change later (first on save)
public static String PREFIX_AUX = "EYESIS_DCT_AUX."; // change later (first on save)
static int QUAD = 4; // number of cameras
public Properties properties = null;
// public String properties_prefix = "EYESIS_DCT.";
public EyesisCorrections eyesisCorrections = null;
public EyesisCorrectionParameters.CorrectionParameters correctionsParameters=null;
double [][][][][][] clt_kernels = null; // can be used to determine monochrome too?
......@@ -124,7 +137,334 @@ public class QuadCLTCPU {
double [] lwir_cold_hot = null;
// int [] woi_tops; // used to calculate scanline timing
// just for debugging with the use of intermediate image
public double [][] dsi = null; // DSI to be saved/restored in the model
public double [][] ds_from_main = null;
public double [][] dsrbg = null; // D, S, R,B,G
public QuadCLTCPU(
QuadCLTCPU qParent,
String name
){
// create from existing instance
this.properties = new Properties(); // properties will be different
// is it needed at all?
for (Enumeration<?> e = qParent.properties.propertyNames(); e.hasMoreElements();) {
String key = (String) e.nextElement();
this.properties.setProperty(key, qParent.properties.getProperty(key));
}
this.properties.putAll(qParent.properties);
this.eyesisCorrections= qParent.eyesisCorrections;
this.correctionsParameters = qParent.correctionsParameters;
this.clt_kernels = qParent.clt_kernels;
if (qParent.geometryCorrection != null) {
this.geometryCorrection = new ErsCorrection(qParent.geometryCorrection, true);
}
this.extrinsic_vect = qParent.extrinsic_vect.clone();
this.extra_items = qParent.extra_items;
this.eyesisKernelImage = qParent.eyesisKernelImage; // most likely not needed
this.startTime = qParent.startTime; // start of batch processing
this.startSetTime = qParent.startSetTime; // start of set processing
this.startStepTime = qParent.startStepTime; // start of step processing
this.fine_corr = ErsCorrection.clone3d(qParent.fine_corr);
/// tp will have only the needed data; large arrays will be null, same with clt_3d_passes
if (qParent.tp != null) {
this.tp = new TileProcessor(qParent.tp);
}
this.image_name = name; // qParent.image_name;
this.image_path = qParent.image_path;
this.gps_lla = ErsCorrection.clone1d(qParent.gps_lla);
if (qParent.image_data != null) this.image_data = qParent.image_data.clone(); // each camera will be re-written, not just modified, so shallow copy
this.new_image_data = qParent.new_image_data;
if (qParent.saturation_imp != null) this.saturation_imp = qParent.saturation_imp.clone(); // each camera will be re-written, not just modified, so shallow copy
this.is_aux = qParent.is_aux;
this.is_mono = qParent.is_mono;
this.lwir_offsets = ErsCorrection.clone1d(qParent.lwir_offsets);
this.lwir_offset = qParent.lwir_offset;
this.lwir_cold_hot = ErsCorrection.clone1d(qParent.lwir_cold_hot);
this.ds_from_main = ErsCorrection.clone2d(qParent.ds_from_main);
this.tp = qParent.tp;
}
public QuadCLT spawnQuadCLT(
// QuadCLTCPU quadCLT_master,
String set_name,
CLTParameters clt_parameters,
ColorProcParameters colorProcParameters, //
// String [] sourceFiles,
// String set_name,
// double [] referenceExposures,
// int [] channelFiles,
// double [] scaleExposures,
// boolean [][] saturation_imp,
int threadsMax,
int debugLevel)
{
QuadCLT quadCLT = new QuadCLT(this, set_name);
quadCLT.restoreFromModel(
clt_parameters,
colorProcParameters,
threadsMax,
debugLevel);
// quadCLT.showDSIMain();
// System.out.println("\n image_name="+(quadCLT.image_name)+"\n"+quadCLT.geometryCorrection.getCorrVector().toString());
// add to generator ?
/*
quadCLT.saveInterProperties( // save properties for interscene processing (extrinsics, ers, ...)
null, // String path, // full name with extension or w/o path to use x3d directory
-2); // int debugLevel)
*/
return quadCLT;
}
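// Assuming image_name follows the usual "<seconds>_<fraction>" timestamp convention
// (e.g. "1578391995_746028"), replacing "_" with "." yields the acquisition time in seconds.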
public double getTimeStamp() {
return Double.parseDouble(image_name.replace("_", "."));
}
public int restoreDSI(String suffix) // "-DSI_COMBO", "-DSI_MAIN" (DSI_COMBO_SUFFIX, DSI_MAIN_SUFFIX)
{
this.dsi = new double [TwoQuadCLT.DSI_SLICES.length][];
return restoreDSI(suffix,dsi);
}
public int restoreDSI(String suffix, // "-DSI_COMBO", "-DSI_MAIN" (DSI_COMBO_SUFFIX, DSI_MAIN_SUFFIX)
double [][] dsi) {
String x3d_path= correctionsParameters.selectX3dDirectory( // for x3d and obj
correctionsParameters.getModelName(image_name), // quad timestamp. Will be ignored if correctionsParameters.use_x3d_subdirs is false
correctionsParameters.x3dModelVersion,
true, // smart,
true); //newAllowed, // save
String file_path = x3d_path + Prefs.getFileSeparator() + image_name + suffix + ".tiff";
ImagePlus imp = null;
try {
imp = new ImagePlus(file_path);
} catch (Exception e) {
System.out.println ("Failed to open "+file_path);
return -1;
}
System.out.println("restoreDSI(): got "+imp.getStackSize()+" slices");
if (imp.getStackSize() < 2) {
System.out.println ("Failed to read "+file_path);
return -1;
}
int num_slices_read = 0;
ImageStack dsi_stack = imp.getStack();
for (int nl = 0; nl < imp.getStackSize(); nl++) {
for (int n = 0; n < TwoQuadCLT.DSI_SLICES.length; n++)
if (TwoQuadCLT.DSI_SLICES[n].equals(dsi_stack.getSliceLabel(nl + 1))) {
float [] fpixels = (float[]) dsi_stack.getPixels(nl + 1);
dsi[n] = new double [fpixels.length];
for (int i = 0; i < fpixels.length; i++) {
dsi[n][i] = fpixels[i];
}
num_slices_read ++;
break;
}
}
return num_slices_read;
}
public void saveInterProperties( // save properties for interscene processing (extrinsics, ers, ...)
String path, // full name with extension or w/o path to use x3d directory
// Properties properties, // if null - will only save extrinsics)
int debugLevel)
{
// upgrade to ErsCorrection (including setting initial velocities and angular velocities, resetting accelerations and scenes)
if (!(geometryCorrection instanceof ErsCorrection)) {
geometryCorrection = new ErsCorrection(geometryCorrection, false); // no need to copy just created gc
}
// update properties from potentially modified parameters (others should be updated elsewhere)
if (path == null) {
path = image_name + "-INTERFRAME"+".corr-xml";
}
if (!path.contains(Prefs.getFileSeparator())) {
String x3d_path= correctionsParameters.selectX3dDirectory( // for x3d and obj
correctionsParameters.getModelName(image_name), // quad timestamp. Will be ignored if correctionsParameters.use_x3d_subdirs is false
correctionsParameters.x3dModelVersion,
true, // smart,
true); //newAllowed, // save
path = x3d_path+Prefs.getFileSeparator()+path;
}
Properties inter_properties = new Properties();
setProperties(QuadCLT.PREFIX,inter_properties);
// quadCLT_aux.setProperties(QuadCLT.PREFIX_AUX,properties);
OutputStream os;
try {
os = new FileOutputStream(path);
} catch (FileNotFoundException e1) {
// missing config directory
File dir = (new File(path)).getParentFile();
if (!dir.exists()){
dir.mkdirs();
try {
os = new FileOutputStream(path);
} catch (FileNotFoundException e2) {
IJ.showMessage("Error","Failed to create directory "+dir.getName()+" to save configuration file: "+path);
return;
}
} else {
IJ.showMessage("Error","Failed to open configuration file: "+path);
return;
}
}
try {
inter_properties.storeToXML(os,
"last updated " + new java.util.Date(), "UTF8");
} catch (IOException e) {
IJ.showMessage("Error","Failed to write XML configuration file: "+path);
return;
}
try {
os.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
if (debugLevel> -3) {
System.out.println("Configuration parameters are saved to "+path);
}
}
public Properties restoreInterProperties( // restore properties for interscene processing (extrinsics, ers, ...)
String path, // full name with extension or null to use x3d directory
// Properties properties, // if null - will only save extrinsics)
int debugLevel)
{
if (path == null) {
path = image_name + ((properties == null) ? "-INTERFRAME":"")+".corr-xml";
}
if (!path.contains(Prefs.getFileSeparator())) {
String x3d_path= correctionsParameters.selectX3dDirectory( // for x3d and obj
correctionsParameters.getModelName(image_name), // quad timestamp. Will be ignored if correctionsParameters.use_x3d_subdirs is false
correctionsParameters.x3dModelVersion,
true, // smart,
true); //newAllowed, // save
path = x3d_path+Prefs.getFileSeparator()+path;
}
properties = loadProperties(
path, // String path,
properties); // Properties properties)
// if (properties == null) {
// properties = new Properties();
// }
String prefix = is_aux?PREFIX_AUX:PREFIX;
getProperties(prefix); // will set Geometry correction non-null
if (!(geometryCorrection instanceof ErsCorrection)) { // should only be for the new GeometryCorrection created in getProperties
geometryCorrection = new ErsCorrection(geometryCorrection, false); // no need to copy just created gc
}
ErsCorrection ers = (ErsCorrection) geometryCorrection;
ers.getPropertiesPose(prefix, properties);
ers.getPropertiesERS(prefix, properties);
ers.getPropertiesScenes(prefix, properties);
return properties;
}
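// Minimal round-trip sketch (illustrative only; "scene" is a hypothetical QuadCLTCPU instance):
//   scene.saveInterProperties(null, debugLevel);                   // writes <image_name>-INTERFRAME.corr-xml to the x3d model directory
//   Properties p = scene.restoreInterProperties(null, debugLevel); // reads a matching .corr-xml back and restores pose, ERS and scene data,
//                                                                  // upgrading geometryCorrection to ErsCorrection if needed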
public void saveDSI() { saveDSI(this.dsi);}
public void saveDSI(
double [][] dsi
)
{
String x3d_path= correctionsParameters.selectX3dDirectory( // for x3d and obj
correctionsParameters.getModelName(image_name), // quad timestamp. Will be ignored if correctionsParameters.use_x3d_subdirs is false
correctionsParameters.x3dModelVersion,
true, // smart,
true); //newAllowed, // save
String title = image_name+TwoQuadCLT.DSI_COMBO_SUFFIX;
ImagePlus imp = (new ShowDoubleFloatArrays()).makeArrays(dsi,tp.getTilesX(), tp.getTilesY(), title, TwoQuadCLT.DSI_SLICES);
eyesisCorrections.saveAndShow(
imp, // ImagePlus imp,
x3d_path, // String path,
false, // boolean png,
false, // boolean show,
0); // int jpegQuality)
}
public void showDSI(){ showDSI(this.dsi);}
public void showDSI(double [][] dsi)
{
String title = image_name + TwoQuadCLT.DSI_COMBO_SUFFIX;
(new ShowDoubleFloatArrays()).showArrays(dsi, tp.getTilesX(), tp.getTilesY(), true, title, TwoQuadCLT.DSI_SLICES);
}
public void saveDSIMain(){saveDSIMain(this.dsi);}
public void saveDSIMain(
double [][] dsi) // DSI_SLICES.length
{
String x3d_path= correctionsParameters.selectX3dDirectory( // for x3d and obj
correctionsParameters.getModelName(image_name), // quad timestamp. Will be ignored if correctionsParameters.use_x3d_subdirs is false
correctionsParameters.x3dModelVersion,
true, // smart,
true); //newAllowed, // save
String title = image_name+"-DSI_MAIN";
String [] titles = {TwoQuadCLT.DSI_SLICES[TwoQuadCLT.DSI_DISPARITY_MAIN], TwoQuadCLT.DSI_SLICES[TwoQuadCLT.DSI_STRENGTH_MAIN]};
double [][] dsi_main = {dsi[TwoQuadCLT.DSI_DISPARITY_MAIN], dsi[TwoQuadCLT.DSI_STRENGTH_MAIN]};
ImagePlus imp = (new ShowDoubleFloatArrays()).makeArrays(dsi_main, tp.getTilesX(), tp.getTilesY(), title, titles);
eyesisCorrections.saveAndShow(
imp, // ImagePlus imp,
x3d_path, // String path,
false, // boolean png,
false, // boolean show,
0); // int jpegQuality)
}
// Save GT from main and AUX calculated DS
public void saveDSIGTAux(
QuadCLT quadCLT_aux,
double [][] dsi_aux_from_main)
{
String x3d_path= correctionsParameters.selectX3dDirectory( // for x3d and obj
correctionsParameters.getModelName(image_name), // quad timestamp. Will be ignored if correctionsParameters.use_x3d_subdirs is false
correctionsParameters.x3dModelVersion,
true, // smart,
true); //newAllowed, // save
String title = quadCLT_aux.image_name+"-DSI_GT-AUX";
// String [] titles = {DSI_SLICES[DSI_DISPARITY_MAIN], DSI_SLICES[DSI_STRENGTH_MAIN]};
// double [][] dsi_main = {dsi[DSI_DISPARITY_MAIN], dsi[DSI_STRENGTH_MAIN]};
ImagePlus imp = (new ShowDoubleFloatArrays()).makeArrays(
dsi_aux_from_main, // dsi_main,
quadCLT_aux.tp.getTilesX(),
quadCLT_aux.tp.getTilesY(),
title,
QuadCLT.FGBG_TITLES_AUX); // titles);
eyesisCorrections.saveAndShow(
imp, // ImagePlus imp,
x3d_path, // String path,
false, // boolean png,
false, // boolean show,
0); // int jpegQuality)
}
public void showDSIMain() {
showDSIMain(this.dsi);
}
public void showDSIMain(
double [][] dsi)
{
String title = image_name+"-DSI_MAIN";
String [] titles = {TwoQuadCLT.DSI_SLICES[TwoQuadCLT.DSI_DISPARITY_MAIN], TwoQuadCLT.DSI_SLICES[TwoQuadCLT.DSI_STRENGTH_MAIN]};
double [][] dsi_main = {dsi[TwoQuadCLT.DSI_DISPARITY_MAIN], dsi[TwoQuadCLT.DSI_STRENGTH_MAIN]};
(new ShowDoubleFloatArrays()).showArrays(dsi_main,tp.getTilesX(), tp.getTilesY(), true, title, titles);
}
public boolean hasNewImageData() {
return new_image_data;
}
......@@ -262,16 +602,40 @@ public class QuadCLTCPU {
this.correctionsParameters = correctionsParameters;
this.properties = properties;
is_aux = prefix.equals(PREFIX_AUX);
// this.properties_prefix = prefix;
// System.out.println("new QuadCLTCPU(), prefix = "+prefix);
getProperties(prefix);
}
// TODO:Add saving just calibration
public static Properties loadProperties(
String path,
Properties properties){
if (properties == null) {
properties = new Properties();
}
InputStream is;
try {
is = new FileInputStream(path);
} catch (FileNotFoundException e) {
IJ.showMessage("Error","Failed to open configuration file: "+path);
return null;
}
try {
properties.loadFromXML(is);
} catch (IOException e) {
IJ.showMessage("Error","Failed to read XML configuration file: "+path);
return null;
}
try {
is.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return properties;
// getAllProperties(properties);
// if (DEBUG_LEVEL>0) System.out.println("Configuration parameters are restored from "+path);
}
// public void setProperties(){
// setProperties(this.properties_prefix);
// }
public void setProperties(String prefix, Properties properties){ // save // USED in lwir
if (properties == null) {
properties = this.properties;
......@@ -296,6 +660,12 @@ public class QuadCLTCPU {
if (is_aux && (gc.rigOffset != null)) {
gc.rigOffset.setProperties(prefix,properties);
}
if (gc instanceof ErsCorrection) {
ErsCorrection ers = (ErsCorrection) gc;
ers.setPropertiesPose(prefix, properties);
ers.setPropertiesERS(prefix, properties);
ers.setPropertiesScenes(prefix, properties);
}
}
......@@ -335,6 +705,16 @@ public class QuadCLTCPU {
public GeometryCorrection getGeometryCorrection() { // USED in lwir
return geometryCorrection;
}
public ErsCorrection getErsCorrection() { // USED in lwir
if (geometryCorrection instanceof ErsCorrection) {
return (ErsCorrection) geometryCorrection;
} else {
return new ErsCorrection (geometryCorrection, false); // just upgrade
}
}
public double [][][][][][] getCLTKernels(){ // USED in lwir
return clt_kernels;
}
......@@ -379,6 +759,7 @@ public class QuadCLTCPU {
// if (is_aux && (geometryCorrection != null)) {
// geometryCorrection.setRigOffsetFromProperies(prefix, properties);
// }
if (geometryCorrection == null) {
double [] extrinsic_vect_saved = this.extrinsic_vect.clone();
boolean OK = initGeometryCorrection(0); // int debugLevel);
......@@ -395,6 +776,8 @@ public class QuadCLTCPU {
if (is_aux) {
geometryCorrection.setRigOffsetFromProperies(prefix, properties);
}
// Inter-frame properties only make sense for scenes, so they are only read when restoring inter-scene data (restoreInterProperties()).
}
public void setKernelImageFile(ImagePlus img_kernels){ // not used in lwir
......@@ -3223,6 +3606,17 @@ public class QuadCLTCPU {
String single_set_name, // process only files that contain specified series (timestamp) in the name
int debugLevel) {
String [] sourceFiles=correctionsParameters.getSourcePaths();
return setChannels( // USED in lwir
single_set_name, // process only files that contain specified series (timestamp) in the name
sourceFiles,
debugLevel);
}
SetChannels [] setChannels( // USED in lwir
String single_set_name, // process only files that contain specified series (timestamp) in the name
String [] sourceFiles,
int debugLevel) {
// String [] sourceFiles=correctionsParameters.getSourcePaths();
boolean [] enabledFiles=new boolean[sourceFiles.length];
for (int i=0;i<enabledFiles.length;i++) enabledFiles[i]=false;
int numFilesToProcess=0;
......@@ -3349,7 +3743,11 @@ public class QuadCLTCPU {
", EXPOSURE = "+imp_srcs[srcChannel].getProperty("EXPOSURE"));
}
}
imp_srcs[srcChannel].setProperty("name", correctionsParameters.getNameFromSourceTiff(sourceFiles[nFile]));
String name_from_dir = correctionsParameters.getNameFromSourceTiff(sourceFiles[nFile]);
if (name_from_dir.equals("jp4")) {
name_from_dir = set_name; // to fix save source files copy in the model/jp4
}
imp_srcs[srcChannel].setProperty("name", name_from_dir);
imp_srcs[srcChannel].setProperty("channel", srcChannel); // it may already have channel
imp_srcs[srcChannel].setProperty("path", sourceFiles[nFile]); // it may already have channel
......@@ -3378,7 +3776,9 @@ public class QuadCLTCPU {
if (pixels[y*width+x+width+1] > max_pix[3]) max_pix[3] = pixels[y*width+x+width+1];
}
}
System.out.println(String.format("channel %d max_pix[] = %6.2f %6.2f %6.2f %6.2f", srcChannel, max_pix[0], max_pix[1], max_pix[2], max_pix[3]));
if (debugLevel > -2) {
System.out.println(String.format("channel %d max_pix[] = %6.2f %6.2f %6.2f %6.2f", srcChannel, max_pix[0], max_pix[1], max_pix[2], max_pix[3]));
}
dbg_dpixels[srcChannel] = new double [pixels.length];
for (int i = 0; i < pixels.length; i++) dbg_dpixels[srcChannel][i] = pixels[i];
// imp_srcs[srcChannel].show();
......@@ -3391,8 +3791,10 @@ public class QuadCLTCPU {
Double.parseDouble((String) imp_srcs[srcChannel].getProperty("saturation_0")),
Double.parseDouble((String) imp_srcs[srcChannel].getProperty("saturation_3")),
Double.parseDouble((String) imp_srcs[srcChannel].getProperty("saturation_2"))};
System.out.println(String.format("channel %d saturations = %6.2f %6.2f %6.2f %6.2f", srcChannel,
if (debugLevel > -2) {
System.out.println(String.format("channel %d saturations = %6.2f %6.2f %6.2f %6.2f", srcChannel,
saturations[0],saturations[1],saturations[2],saturations[3]));
}
double [] scaled_saturations = new double [saturations.length];
for (int i = 0; i < scaled_saturations.length; i++){
scaled_saturations[i] = saturations[i] * clt_parameters.sat_level;
......@@ -3411,7 +3813,9 @@ public class QuadCLTCPU {
if (!is_lwir) { // no vignetting correction and no color scaling
if (this.correctionsParameters.vignetting && correct_vignetting){
if ((eyesisCorrections.channelVignettingCorrection==null) || (srcChannel<0) || (srcChannel>=eyesisCorrections.channelVignettingCorrection.length) || (eyesisCorrections.channelVignettingCorrection[srcChannel]==null)){
System.out.println("No vignetting data for channel "+srcChannel);
if (debugLevel > -3) {
System.out.println("No vignetting data for channel "+srcChannel);
}
return null; // not used in lwir
}
/// float [] pixels=(float []) imp_srcs[srcChannel].getProcessor().getPixels();
......@@ -3467,8 +3871,9 @@ public class QuadCLTCPU {
}
}
double max_vign_corr = clt_parameters.vignetting_range*min_non_zero;
System.out.println("Vignetting data: channel="+srcChannel+", min = "+min_non_zero);
if (debugLevel > -2) {
System.out.println("Vignetting data: channel="+srcChannel+", min = "+min_non_zero);
}
for (int i=0;i<pixels.length;i++){
double d = vign_pixels[i];
if (d > max_vign_corr) d = max_vign_corr;
......@@ -3487,8 +3892,11 @@ public class QuadCLTCPU {
}
} else { // assuming GR/BG pattern // not used in lwir
System.out.println("Applying fixed color gain correction parameters: Gr="+
clt_parameters.novignetting_r+", Gg="+clt_parameters.novignetting_g+", Gb="+clt_parameters.novignetting_b);
if (debugLevel > -2) {
System.out.println("Applying fixed color gain correction parameters: Gr="+
clt_parameters.novignetting_r+", Gg="+clt_parameters.novignetting_g+", Gb="+clt_parameters.novignetting_b);
}
/// float [] pixels=(float []) imp_srcs[srcChannel].getProcessor().getPixels();
/// int width = imp_srcs[srcChannel].getWidth();
/// int height = imp_srcs[srcChannel].getHeight();
......@@ -3508,7 +3916,9 @@ public class QuadCLTCPU {
}
}
// temporary applying scaleExposures[srcChannel] here, setting it to all 1.0
System.out.println("Temporarily applying scaleExposures[] here - 1" );
if (debugLevel > -2) {
System.out.println("Temporarily applying scaleExposures[] here - 1" );
}
for (int srcChannel=0; srcChannel<channelFiles.length; srcChannel++){
if (!is_lwir) {
float [] pixels=(float []) imp_srcs[srcChannel].getProcessor().getPixels();
......@@ -8246,6 +8656,10 @@ public class QuadCLTCPU {
}
}
}
if (geometryCorrection instanceof ErsCorrection) {
((ErsCorrection) geometryCorrection).setupERSfromExtrinsics();
}
return true; // (comp_diff < (adjust_poly ? min_poly_update : min_sym_update));
}
......@@ -11817,10 +12231,31 @@ public class QuadCLTCPU {
boolean updateStatus,
int debugLevel)
{
final int tilesX = tp.getTilesX();
final int tilesY = tp.getTilesY();
// final int transform_size =clt_parameters.transform_size;
CLTPass3d scan = tp.clt_3d_passes.get(scanIndex);
setPassAvgRBGA( // get image from a single pass, return relative path for x3d // USED in lwir
clt_parameters,
scan,
threadsMax, // maximal number of threads to launch
updateStatus,
debugLevel);
}
public void setPassAvgRBGA( // get image from a single pass, return relative path for x3d // USED in lwir
CLTParameters clt_parameters,
// int scanIndex,
CLTPass3d scan,
int threadsMax, // maximal number of threads to launch
boolean updateStatus,
int debugLevel)
{
// final int tilesX = tp.getTilesX();
// final int tilesY = tp.getTilesY();
final int tilesX = scan.getTileProcessor().getTilesX();
final int tilesY = scan.getTileProcessor().getTilesY();
// final int transform_size =clt_parameters.transform_size;
// CLTPass3d scan = tp.clt_3d_passes.get(scanIndex);
double [][][][] texture_tiles = scan.texture_tiles;
if (texture_tiles == null) return;
int num_layers = 0;
......
......@@ -80,30 +80,22 @@ public class TileProcessor {
public boolean [] rig_post_poles_sel = null; // Rig tile selection after processing poles
public double [][] main_ds_ml = null; // main camera DSI restored from the COMBO-DSI file to generate ML test files
public boolean monochrome = false; // these are monochrome images
public boolean lwir = false; // all monochrome are lwir
private boolean is_aux = false; // this camera is aux
public int clt_3d_passes_size = 0; //clt_3d_passes size after initial processing
public int clt_3d_passes_rig_size = 0; //clt_3d_passes size after initial processing and rig processing
private int tilesX;
private int tilesY;
private double corr_magic_scale = 0.85; // reported correlation offset vs. actual one (not yet understood)
private double trustedCorrelation = 4.0; // trusted measured disparity difference (before scaling)
private double maxOverexposure = 0.5;
private int tileSize = 8; // number of linear pixels in a tile (tile is square tileSize*tileSize)
int superTileSize = 8; // number of linear tiles in a super-tile (supertile is square superTileSize*superTileSize tiles
// or (superTileSize*tileSize) * (superTileSize*tileSize) pixels, currently 64x64 pixels)
double [][] periodics = null;
public int threadsMax = 100; // maximal number of frames to run
public int globalDebugLevel = 0;
public boolean monochrome = false; // these are monochrome images
public boolean lwir = false; // all monochrome are lwir
private boolean is_aux = false; // this camera is aux
public int clt_3d_passes_size = 0; //clt_3d_passes size after initial processing
public int clt_3d_passes_rig_size = 0; //clt_3d_passes size after initial processing and rig processing
private int tilesX;
private int tilesY;
private double corr_magic_scale = 0.85; // reported correlation offset vs. actual one (not yet understood)
private double trustedCorrelation = 4.0; // trusted measured disparity difference (before scaling)
private double maxOverexposure = 0.5;
private int tileSize = 8; // number of linear pixels in a tile (tile is square tileSize*tileSize)
int superTileSize = 8; // number of linear tiles in a super-tile (supertile is square superTileSize*superTileSize tiles
double [][] periodics = null;
public int threadsMax = 100; // maximal number of threads to run
public int globalDebugLevel = 0;
public double [][] dbg_filtered_disp_strength;
public double [][] getPeriodcs(){
// todo: add calculation (if null) and reset
return this.periodics;
}
// All parameters are set only once, during instantiation
public TileProcessor(
......@@ -131,6 +123,43 @@ public class TileProcessor {
this.maxOverexposure = maxOverexposure;
this.threadsMax = threadsMax;
}
public TileProcessor(TileProcessor tp) {
this.tilesX = tp.tilesX;
this.tilesY = tp.tilesY;
this.tileSize = tp.tileSize;
this.superTileSize = tp.superTileSize;
this.monochrome = tp.monochrome;
this.lwir = tp.lwir;
this.is_aux = tp.is_aux;
this.corr_magic_scale = tp.corr_magic_scale;
this.trustedCorrelation = tp.trustedCorrelation;
this.maxOverexposure = tp.maxOverexposure;
this.threadsMax = tp.threadsMax;
this.globalDebugLevel = tp.globalDebugLevel;
// next should not be needed for new instance
/*
this.periodics = ErsCorrection.clone2d(tp.periodics); not needed
this.dbg_filtered_disp_strength = tp.dbg_filtered_disp_strength;
public ArrayList <CLTPass3d> clt_3d_passes = null;
public double [][] rig_disparity_strength = null; // Disparity and strength created by a two-camera rig, with disparity scale and distortions of the main camera
public double [][] rig_pre_poles_ds = null; // Rig disparity and strength before processing poles
public double [][] rig_post_poles_ds = null; // Rig disparity and strength after processing poles
public boolean [] rig_pre_poles_sel = null; // Rig tile selection before processing poles
public boolean [] rig_post_poles_sel = null; // Rig tile selection after processing poles
public double [][] main_ds_ml = null; // main camera DSI restored from the COMBO-DSI file to generate ML test files
public int clt_3d_passes_size = 0; //clt_3d_passes size after initial processing
public int clt_3d_passes_rig_size = 0; //clt_3d_passes size after initial processing and rig processing
*/
}
public double [][] getPeriodcs(){
// todo: add calculation (if null) and reset
return this.periodics;
}
public boolean isMonochrome() {return monochrome;}
public boolean isLwir() {return lwir;}
public boolean isAux() {return is_aux;}
......@@ -8697,6 +8726,151 @@ ImageDtt.startAndJoin(threads);
titles);
}
public double [] fillNaNs(
final double [] data,
int width,
final int grow,
double diagonal_weight, // relative to ortho
int num_passes,
final int threadsMax) // maximal number of threads to launch
{
final int scan0 = ( 3* grow) / 2;
int height = data.length/width;
double wdiag = 0.25 *diagonal_weight / (diagonal_weight + 1.0);
double wortho = 0.25 / (diagonal_weight + 1.0);
final double [] neibw = {wortho, wdiag, wortho, wdiag, wortho, wdiag, wortho, wdiag};
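// neibw alternates orthogonal and diagonal weights; 4*wortho + 4*wdiag == 1.0, so the 8-neighbor kernel is already normalized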
final int tiles = width * height;
final boolean [] fixed = new boolean [tiles];
int num_fixed = 0;
double davg = 0.0;
for (int i = 0; i < tiles; i++) {
if (!Double.isNaN(data[i])) {
fixed[i] = true;
num_fixed ++;
davg+= data[i];
}
}
if (num_fixed > 0) {
davg /= num_fixed;
} else {
return null;
}
final double fdavg = davg;
final boolean [] grown = fixed.clone();
final TileNeibs tn = new TileNeibs(width, height);
growTiles(
grow, // grow tile selection by 1 over non-background tiles 1: 4 directions, 2 - 8 directions, 3 - 8 by 1, 4 by 1 more
grown,
null,
width,
height);
int num_active = 0;
for (int i = 0; i < tiles; i++) {
if (grown[i] && !fixed[i]) num_active++;
}
if (num_active == 0) {
return data.clone();
}
final int [] active = new int [num_active];
final double [] data_in = data.clone();
final double [] data_out = new double [tiles];
num_active = 0;
for (int i = 0; i < tiles; i++) {
if (grown[i] && !fixed[i]) {
active [num_active++] = i;
data_in[i] = davg; // initial value
}
}
final Thread[] threads = ImageDtt.newThreadArray(threadsMax);
// final int numThreads = threads.length;
final AtomicInteger ai = new AtomicInteger(0);
// Seed each active tile with the plain average of the fixed (non-NaN) samples within a +/-scan0 (= 3*grow/2) tile window, falling back to the global average when none are in range
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int iTile = ai.getAndIncrement(); iTile < active.length; iTile = ai.getAndIncrement()) {
int nt = active[iTile];
double s = 0.0;
int n = 0;
for (int dy = -scan0; dy <= scan0; dy++) {
for (int dx = -scan0; dx <= scan0; dx++) {
int nt1 = tn.getNeibIndex(nt, dx, dy);
if ((nt1 >=0) && fixed[nt1]) {
s += data[nt1];
n++;
}
}
}
if (n > 0) {
data_in[nt] = s/n;
} else {
data_in[nt] = fdavg;
}
}
}
};
}
ImageDtt.startAndJoin(threads);
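// Relaxation passes: each active tile is repeatedly replaced by the normalized weighted average of its 8 neighbors (fixed neighbors contribute the original data, active neighbors the result of the previous pass)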
for (int pass = 0; pass < num_passes; pass ++) {
ai.set(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int iTile = ai.getAndIncrement(); iTile < active.length; iTile = ai.getAndIncrement()) {
int nt = active[iTile];
double s = 0.0;
double sw = 0.0;
for (int dir = 0; dir < 8; dir++) {
int nt1 = tn.getNeibIndex(nt, dir);
if ((nt1 >=0) && grown[nt1]) {
if (fixed[nt1]) {
s += data[nt1] * neibw[dir];
} else {
s += data_in[nt1] * neibw[dir];
}
sw += neibw[dir];
}
}
// sw is always > 0 here: every active tile was added by growing the fixed selection, so at least one of its 8 neighbors belongs to the grown mask
s /= sw;
data_out[nt] = s;
}
}
};
}
ImageDtt.startAndJoin(threads);
if (pass < (num_passes - 1)) {
System.arraycopy(data_out,0,data_in,0,tiles);
}
}
for (int i = 0; i < tiles; i++) if (fixed[i]) {
data_out[i] = data[i];
}
/*
double [][] dbg_img = new double [4][tiles];
String [] titles = {"data", "data_out", "fixed", "grown"};
dbg_img[0] = data;
dbg_img[1] = data_out;
for (int i = 0; i < tiles; i++) {
dbg_img[2][i] = fixed[i] ? 1.0 : 0.0;
dbg_img[3][i] = grown[i] ? 1.0 : 0.0;
}
(new ShowDoubleFloatArrays()).showArrays(
dbg_img,
tilesX,
tilesY,
true,
"filled_NaNs",
titles);
*/
return data_out;
}
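A minimal usage sketch for fillNaNs() (not part of this commit), assuming it is an instance method of TileProcessor as the surrounding diff suggests; the tp instance, the seeded sample and all parameter values below are illustrative assumptions only:
// Hypothetical usage sketch; "tp" is an already constructed TileProcessor
double [] disparity = new double [tp.getTilesX() * tp.getTilesY()]; // one value per tile
java.util.Arrays.fill(disparity, Double.NaN); // start with all gaps
disparity[0] = 1.0; // at least one defined sample; with no defined samples fillNaNs() returns null
double [] filled = tp.fillNaNs(
disparity, // data with NaN gaps
tp.getTilesX(), // width of the tile grid
2, // grow: how far to expand the filled area around defined tiles
0.7, // diagonal neighbor weight relative to orthogonal neighbors
30, // number of relaxation passes
100); // threadsMax
if (filled == null) {
System.out.println("All input samples were NaN - nothing to interpolate from");
}
Only tiles within grow steps of a defined sample are interpolated; note that tiles outside the grown selection come back as 0.0 (from the freshly allocated data_out array), not NaN.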
/* Create a Thread[] array as large as the number of processors available.
......
......@@ -8173,9 +8173,8 @@ if (debugLevel > -100) return true; // temporarily !
if (quadCLT_main.correctionsParameters.clt_batch_dsi) {
saveDSI (
//clt_parameters
);
quadCLT_main.saveDSI (
dsi);
}
if (quadCLT_main.correctionsParameters.clt_batch_save_extrinsics) {
......@@ -8206,9 +8205,116 @@ if (debugLevel > -100) return true; // temporarily !
}
public void TestInterScene(
QuadCLT quadCLT_main, // tiles should be set
// QuadCLT quadCLT_aux,
CLTParameters clt_parameters,
EyesisCorrectionParameters.DebayerParameters debayerParameters,
ColorProcParameters colorProcParameters,
ColorProcParameters colorProcParameters_aux,
CorrectionColorProc.ColorGainsParameters channelGainParameters,
EyesisCorrectionParameters.RGBParameters rgbParameters,
EyesisCorrectionParameters.EquirectangularParameters equirectangularParameters,
Properties properties,
final int threadsMax, // maximal number of threads to launch
final boolean updateStatus,
final int debugLevel) throws Exception
{
if ((quadCLT_main != null) && (quadCLT_main.getGPU() != null)) {
quadCLT_main.getGPU().resetGeometryCorrection();
quadCLT_main.gpuResetCorrVector(); // .getGPU().resetGeometryCorrectionVector();
}
// final boolean batch_mode = clt_parameters.batch_run;
this.startTime=System.nanoTime();
String [] sourceFiles0=quadCLT_main.correctionsParameters.getSourcePaths();
QuadCLT.SetChannels [] set_channels_main = quadCLT_main.setChannels(debugLevel);
if ((set_channels_main == null) || (set_channels_main.length==0)) {
System.out.println("No files to process (of "+sourceFiles0.length+")");
return;
}
QuadCLT.SetChannels [] set_channels=quadCLT_main.setChannels(debugLevel);
// String set_name = set_channels[0].set_name;
QuadCLT [] quadCLTs = new QuadCLT [set_channels.length];
for (int i = 0; i < quadCLTs.length; i++) {
quadCLTs[i] = quadCLT_main.spawnQuadCLT(
set_channels[i].set_name,
clt_parameters,
colorProcParameters, //
threadsMax,
debugLevel);
// temporarily fix wrong sign:
ErsCorrection ers = (ErsCorrection) (quadCLTs[i].getGeometryCorrection());
ers.setupERSfromExtrinsics();
quadCLTs[i].setDSRBG(
clt_parameters, // CLTParameters clt_parameters,
threadsMax, // int threadsMax, // maximal number of threads to launch
updateStatus, // boolean updateStatus,
debugLevel); // int debugLevel)
/// quadCLTs[i].showDSIMain();
}
double k_prev = 0.75;
double corr_scale = 0.75;
for (int i = 0; i < quadCLTs.length; i++) {
// for (int i = 1; i < quadCLTs.length; i++) {
QuadCLT qPrev = (i > 0) ? quadCLTs[i - 1] : null;
double [][][] pair_sets = quadCLTs[i].get_pair(
k_prev,
qPrev,
corr_scale,
1); // -1); // int debug_level);
// old code
/*
double [][] dbg_img = quadCLTs[i].test_back_forth_debug(
k_prev,
qPrev,
corr_scale,
1); // -1); // int debug_level);
double [][] ds0 = {dbg_img[6],dbg_img[2]};
double [][] ds1 = {dbg_img[7],dbg_img[3]};
double [] disparity = quadCLTs[i].dsi[TwoQuadCLT.DSI_DISPARITY_MAIN];
double [] strength = quadCLTs[i].dsi[TwoQuadCLT.DSI_STRENGTH_MAIN];
double disparity_min = -1.0;
double disparity_max = 1.0;
quadCLTs[i].getOpticalFlow(
disparity_min,
disparity_max,
ds0,
ds1,
1); // int debugLevel)
*/
}
/*
for (int i = 0; i < quadCLTs.length; i++) {
quadCLTs[i].saveInterProperties(// save properties for interscene processing (extrinsics, ers, ...)
null, // String path, // full name with extension or w/o path to use x3d directory
debugLevel);
}
*/
System.out.println("End of test");
}
/*
* public double [][] getDSRBG (){
return dsrbg;
}
*/
public void batchLwirRig(
// GPUTileProcessor.GpuQuad gpuQuad_main, // may be null if GPU for MAIN is not used
// GPUTileProcessor.GpuQuad gpuQuad_aux, // may be null if GPU for AUX is not used
QuadCLT quadCLT_main, // tiles should be set
QuadCLT quadCLT_aux,
CLTParameters clt_parameters,
......@@ -8295,7 +8401,7 @@ if (debugLevel > -100) return true; // temporarily !
System.out.println("Building basic DSI for the main camera image set "+quadCLT_main.image_name+
", pass "+(num_adjust_main+1)+" of "+adjust_main);
}
//Generates background image in model tree - should be done later, after adjustment (It is overwritten later, so OK)
quadCLT_main.preExpandCLTQuad3d( // returns ImagePlus, but it already should be saved/shown
imp_srcs_main, // [srcChannel], // should have properties "name"(base for saving results), "channel","path"
saturation_imp_main, // boolean [][] saturation_imp, // (near) saturated pixels or null
......@@ -8369,7 +8475,7 @@ if (debugLevel > -100) return true; // temporarily !
dsi[DSI_DISPARITY_MAIN] = dsi_ly[0];
dsi[DSI_STRENGTH_MAIN] = dsi_ly[1];
// if (quadCLT_main.correctionsParameters.clt_batch_dsi) { // Should be always enabled ?
saveDSIMain ();
quadCLT_main.saveDSIMain (dsi);
// }
// clear memory for main
quadCLT_main.tp.resetCLTPasses();
......@@ -8462,7 +8568,8 @@ if (debugLevel > -100) return true; // temporarily !
dsi[DSI_DISPARITY_MAIN] = main_last_scan[0];
dsi[DSI_STRENGTH_MAIN] = main_last_scan[1];
if (quadCLT_main.correctionsParameters.clt_batch_dsi) { // Should be always enabled ?
saveDSIMain ();
quadCLT_main.saveDSIMain (
dsi);
}
......@@ -8535,9 +8642,8 @@ if (debugLevel > -100) return true; // temporarily !
}
// save assigned disparity also? - with "-DSI_COMBO" suffix
if (quadCLT_main.correctionsParameters.clt_batch_dsi) {
saveDSI (
//clt_parameters
);
quadCLT_main.saveDSI (
dsi);
}
}
......@@ -8545,7 +8651,9 @@ if (debugLevel > -100) return true; // temporarily !
} else { // if (quadCLT_main.correctionsParameters.clt_batch_explore) {
int num_restored = 0;
try {
num_restored = restoreDSI(DSI_MAIN_SUFFIX); // "-DSI_COMBO", "-DSI_MAIN"
num_restored = quadCLT_main.restoreDSI(DSI_MAIN_SUFFIX, // "-DSI_COMBO", "-DSI_MAIN"
dsi);
} catch (Exception e) {
}
......@@ -8556,7 +8664,15 @@ if (debugLevel > -100) return true; // temporarily !
null, // String path, // full name with extension or w/o path to use x3d directory
null, // Properties properties, // if null - will only save extrinsics)
debugLevel);
quadCLT_main.saveInterProperties( // save properties for interscene processing (extrinsics, ers, ...)
null, // String path, // full name with extension or w/o path to use x3d directory
// null, // Properties properties, // if null - will only save extrinsics)
debugLevel);
}
if (quadCLT_main.correctionsParameters.clt_batch_save_all) {
saveProperties(
null, // String path, // full name with extension or w/o path to use x3d directory
......@@ -8721,7 +8837,9 @@ if (debugLevel > -100) return true; // temporarily !
// dsi[DSI_STRENGTH_AUX] = aux_last_scan[1]; // incompatible dimensions
dsi_aux_from_main[QuadCLT.FGBG_AUX_DISP] = aux_last_scan[0];
dsi_aux_from_main[QuadCLT.FGBG_AUX_STR] = aux_last_scan[1];
saveDSIGTAux(); // GT from main and AUX DS
quadCLT_main.saveDSIGTAux( // GT from main and AUX DS
quadCLT_aux,
dsi_aux_from_main);
quadCLT_aux.tp.resetCLTPasses();
}
//
......@@ -8740,10 +8858,15 @@ if (debugLevel > -100) return true; // temporarily !
if (quadCLT_main.correctionsParameters.clt_batch_save_extrinsics) {
saveProperties(
saveProperties( // uses global quadCLT_main
null, // String path, // full name with extension or w/o path to use x3d directory
null, // Properties properties, // if null - will only save extrinsics)
debugLevel);
quadCLT_main.saveInterProperties( // save properties for interscene processing (extrinsics, ers, ...)
null, // String path, // full name with extension or w/o path to use x3d directory
// null, // Properties properties, // if null - will only save extrinsics)
debugLevel);
}
if (quadCLT_main.correctionsParameters.clt_batch_save_all) {
saveProperties(
......@@ -8767,45 +8890,27 @@ if (debugLevel > -100) return true; // temporarily !
IJ.d2s(0.000000001*(System.nanoTime()-this.startTime),3)+" sec, --- Free memory="+Runtime.getRuntime().freeMemory()+" (of "+Runtime.getRuntime().totalMemory()+")");
}
public void saveDSI(
// CLTParameters clt_parameters
)
{
String x3d_path= quadCLT_main.correctionsParameters.selectX3dDirectory( // for x3d and obj
quadCLT_main.correctionsParameters.getModelName(quadCLT_main.image_name), // quad timestamp. Will be ignored if correctionsParameters.use_x3d_subdirs is false
quadCLT_main.correctionsParameters.x3dModelVersion,
true, // smart,
true); //newAllowed, // save
String title = quadCLT_main.image_name+DSI_COMBO_SUFFIX;
ImagePlus imp = (new ShowDoubleFloatArrays()).makeArrays(dsi,quadCLT_main.tp.getTilesX(), quadCLT_main.tp.getTilesY(), title, DSI_SLICES);
quadCLT_main.eyesisCorrections.saveAndShow(
imp, // ImagePlus imp,
x3d_path, // String path,
false, // boolean png,
false, // boolean show,
0); // int jpegQuality)
}
public void showDSI()
{
String title = quadCLT_main.image_name+DSI_COMBO_SUFFIX;
(new ShowDoubleFloatArrays()).showArrays(dsi,quadCLT_main.tp.getTilesX(), quadCLT_main.tp.getTilesY(), true, title, DSI_SLICES);
quadCLT_main.showDSI(dsi);
}
/*
public void saveDSIMain(
)
QuadCLT quadCLT,
double [][] dsi) // DSI_SLICES.length
{
String x3d_path= quadCLT_main.correctionsParameters.selectX3dDirectory( // for x3d and obj
quadCLT_main.correctionsParameters.getModelName(quadCLT_main.image_name), // quad timestamp. Will be ignored if correctionsParameters.use_x3d_subdirs is false
quadCLT_main.correctionsParameters.x3dModelVersion,
String x3d_path= quadCLT.correctionsParameters.selectX3dDirectory( // for x3d and obj
quadCLT.correctionsParameters.getModelName(quadCLT.image_name), // quad timestamp. Will be ignored if correctionsParameters.use_x3d_subdirs is false
quadCLT.correctionsParameters.x3dModelVersion,
true, // smart,
true); //newAllowed, // save
String title = quadCLT_main.image_name+"-DSI_MAIN";
String title = quadCLT.image_name+"-DSI_MAIN";
String [] titles = {DSI_SLICES[DSI_DISPARITY_MAIN], DSI_SLICES[DSI_STRENGTH_MAIN]};
double [][] dsi_main = {dsi[DSI_DISPARITY_MAIN], dsi[DSI_STRENGTH_MAIN]};
ImagePlus imp = (new ShowDoubleFloatArrays()).makeArrays(dsi_main,quadCLT_main.tp.getTilesX(), quadCLT_main.tp.getTilesY(), title, titles);
quadCLT_main.eyesisCorrections.saveAndShow(
ImagePlus imp = (new ShowDoubleFloatArrays()).makeArrays(dsi_main,quadCLT.tp.getTilesX(), quadCLT.tp.getTilesY(), title, titles);
quadCLT.eyesisCorrections.saveAndShow(
imp, // ImagePlus imp,
x3d_path, // String path,
false, // boolean png,
......@@ -8813,8 +8918,12 @@ if (debugLevel > -100) return true; // temporarily !
0); // int jpegQuality)
}
// Save GT from main and AUX calculated DS
public void saveDSIGTAux()
public void saveDSIGTAux(
QuadCLT quadCLT_main,
QuadCLT quadCLT_aux,
double [][] dsi_aux_from_main)
{
String x3d_path= quadCLT_main.correctionsParameters.selectX3dDirectory( // for x3d and obj
quadCLT_main.correctionsParameters.getModelName(quadCLT_main.image_name), // quad timestamp. Will be ignored if correctionsParameters.use_x3d_subdirs is false
......@@ -8826,7 +8935,7 @@ if (debugLevel > -100) return true; // temporarily !
// double [][] dsi_main = {dsi[DSI_DISPARITY_MAIN], dsi[DSI_STRENGTH_MAIN]};
ImagePlus imp = (new ShowDoubleFloatArrays()).makeArrays(
this.dsi_aux_from_main, // dsi_main,
dsi_aux_from_main, // dsi_main,
quadCLT_aux.tp.getTilesX(),
quadCLT_aux.tp.getTilesY(),
title,
......@@ -8839,44 +8948,9 @@ if (debugLevel > -100) return true; // temporarily !
0); // int jpegQuality)
}
public int restoreDSI(String suffix){ // "-DSI_COMBO", "-DSI_MAIN" (DSI_COMBO_SUFFIX, DSI_MAIN_SUFFIX)
String x3d_path= quadCLT_main.correctionsParameters.selectX3dDirectory( // for x3d and obj
quadCLT_main.correctionsParameters.getModelName(quadCLT_main.image_name), // quad timestamp. Will be ignored if correctionsParameters.use_x3d_subdirs is false
quadCLT_main.correctionsParameters.x3dModelVersion,
true, // smart,
true); //newAllowed, // save
String file_path = x3d_path + Prefs.getFileSeparator() + quadCLT_main.image_name + suffix + ".tiff";
ImagePlus imp = null;
try {
imp = new ImagePlus(file_path);
} catch (Exception e) {
System.out.println ("Failed to open "+file_path);
return -1;
}
System.out.println("restoreDSI(): got "+imp.getStackSize()+" slices");
if (imp.getStackSize() < 2) {
System.out.println ("Failed to read "+file_path);
return -1;
}
int num_slices_read = 0;
ImageStack dsi_stack = imp.getStack();
for (int nl = 0; nl < imp.getStackSize(); nl++) {
for (int n = 0; n < DSI_SLICES.length; n++)
if (TwoQuadCLT.DSI_SLICES[n].equals(dsi_stack.getSliceLabel(nl + 1))) {
float [] fpixels = (float[]) dsi_stack.getPixels(nl + 1);
dsi[n] = new double [fpixels.length];
for (int i = 0; i < fpixels.length; i++) {
dsi[n][i] = fpixels[i];
}
num_slices_read ++;
break;
}
}
return num_slices_read;
}
public void showDSIMain()
public void showDSIMain(
QuadCLT quadCLT_main,
double [][] dsi)
{
String title = quadCLT_main.image_name+"-DSI_MAIN";
String [] titles = {DSI_SLICES[DSI_DISPARITY_MAIN], DSI_SLICES[DSI_STRENGTH_MAIN]};
......@@ -8884,6 +8958,7 @@ if (debugLevel > -100) return true; // temporarily !
(new ShowDoubleFloatArrays()).showArrays(dsi_main,quadCLT_main.tp.getTilesX(), quadCLT_main.tp.getTilesY(), true, title, titles);
}
*/
public double [][] getRigDSI(
......