Commit 28f7044c authored by Andrey Filippov

Ground truth from EO to LWIR

parent e2478b97
......@@ -1544,8 +1544,8 @@ public class CLTParameters {
if (properties.getProperty(prefix+"max_corr_radius")!=null) this.max_corr_radius=Double.parseDouble(properties.getProperty(prefix+"max_corr_radius"));
// for compatibility with old settings
if (properties.getProperty(prefix+"enhortho_width")!=null) this.img_dtt.enhortho_width=Integer.parseInt(properties.getProperty(prefix+"enhortho_width"));
if (properties.getProperty(prefix+"enhortho_scale")!=null) this.img_dtt.enhortho_scale=Double.parseDouble(properties.getProperty(prefix+"enhortho_scale"));
if (properties.getProperty(prefix+"enhortho_width")!=null) this.img_dtt.setEnhOrthoWidth(Integer.parseInt(properties.getProperty(prefix+"enhortho_width")));
if (properties.getProperty(prefix+"enhortho_scale")!=null) this.img_dtt.setEnhOrthoScale(Double.parseDouble(properties.getProperty(prefix+"enhortho_scale")));
// for compatibility with old settings
if (properties.getProperty(prefix+"max_corr_double")!=null) this.max_corr_double=Boolean.parseBoolean(properties.getProperty(prefix+"max_corr_double"));
......
......@@ -124,6 +124,18 @@ public class EyesisCorrections {
false,
false);
}
public void initPixelMapping(
int debugLevel) {
String [] sensorPaths=correctionsParameters.selectSensorFiles(this.debugLevel);
this.pixelMapping=new PixelMapping(
sensorPaths,
correctionsParameters.firstSubCameraConfig, // int first_channel, // 0 - old way
correctionsParameters.numSubCameras, // int num_channels, // 0 - any
true, // boolean update_channel, // false (replace file channel with effective channel (subtract first_channel)
debugLevel);
}
public void initSensorFiles(
int debugLevel,
boolean missing_ok,
......@@ -132,6 +144,10 @@ public class EyesisCorrections {
){
this.sharpKernelPaths=null;
this.smoothKernelPaths=null;
if (this.pixelMapping == null) { // NOTE: will not notice switching sensor files !
initPixelMapping(debugLevel);
}
/*
String [] sensorPaths=correctionsParameters.selectSensorFiles(this.debugLevel);
this.pixelMapping=new PixelMapping(
sensorPaths,
......@@ -139,6 +155,7 @@ public class EyesisCorrections {
correctionsParameters.numSubCameras, // int num_channels, // 0 - any
true, // boolean update_channel, // false (replace file channel with effective channel (subtract first_channel)
debugLevel);
*/
if (all_sensors) {
this.usedChannels = new boolean [this.pixelMapping.sensors.length];
for (int i = 0; i < this.usedChannels.length; i++) {
......
......@@ -88,6 +88,7 @@ import com.elphel.imagej.lwir.LwirReader;
import com.elphel.imagej.readers.EyesisTiff;
import com.elphel.imagej.tensorflow.TensorflowInferModel;
import com.elphel.imagej.tileprocessor.DttRad2;
import com.elphel.imagej.tileprocessor.GeometryCorrection;
import com.elphel.imagej.tileprocessor.ImageDtt;
import com.elphel.imagej.tileprocessor.MLStats;
import com.elphel.imagej.tileprocessor.QuadCLT;
......@@ -728,6 +729,7 @@ private Panel panel1,
addButton("AUX planes", panelLWIR, color_conf_process_aux);
addButton("AUX ASSIGN", panelLWIR, color_process_aux);
addButton("AUX OUT 3D", panelLWIR, color_process_aux);
addButton("Main to AUX", panelLWIR, color_process_aux);
addButton("LWIR_TEST", panelLWIR, color_conf_process);
addButton("LWIR_ACQUIRE", panelLWIR, color_conf_process);
......@@ -3958,12 +3960,12 @@ private Panel panel1,
IJ.showMessage("Warning",msg);
return;
}
/*
EYESIS_CORRECTIONS.initSensorFiles(DEBUG_LEVEL,
true, // true - ignore missing files
true, // boolean all_sensors,
COLOR_PROC_PARAMETERS.correct_vignetting); //boolean correct_vignetting
*/
QUAD_CLT.resetGeometryCorrection();
QUAD_CLT.initGeometryCorrection(DEBUG_LEVEL+2);
......@@ -4450,6 +4452,10 @@ private Panel panel1,
return;
}
}
@SuppressWarnings("unused")
QuadCLT dbg_qc_main = QUAD_CLT;
@SuppressWarnings("unused")
QuadCLT dbg_qc_aux = QUAD_CLT_AUX;
///========================================
int num_infinity_corr = infinity_corr? CLT_PARAMETERS.inf_repeat : 1;
......@@ -4684,7 +4690,12 @@ private Panel panel1,
EYESIS_CORRECTIONS.setDebug(DEBUG_LEVEL);
clt3d_aux(adjust_extrinsics, adjust_poly);
return;
} else if (label.equals("Main to AUX")) {
DEBUG_LEVEL=MASTER_DEBUG_LEVEL;
EYESIS_CORRECTIONS.setDebug(DEBUG_LEVEL);
mainToAux();
return;
//
} else if (label.equals("CLT planes")) {
DEBUG_LEVEL=MASTER_DEBUG_LEVEL;
EYESIS_CORRECTIONS.setDebug(DEBUG_LEVEL);
......@@ -5106,6 +5117,295 @@ private Panel panel1,
// End of buttons code
}
}
/* ======================================================================== */
public boolean mainToAux() {
if (QUAD_CLT == null){
QUAD_CLT = new QuadCLT (
QuadCLT.PREFIX,
PROPERTIES,
EYESIS_CORRECTIONS,
CORRECTION_PARAMETERS);
if (DEBUG_LEVEL > 0){
System.out.println("Created new QuadCLT instance, will need to read CLT kernels");
}
}
if (QUAD_CLT_AUX == null){
if (EYESIS_CORRECTIONS_AUX == null) {
EYESIS_CORRECTIONS_AUX = new EyesisCorrections(SYNC_COMMAND.stopRequested,CORRECTION_PARAMETERS.getAux());
}
QUAD_CLT_AUX = new QuadCLT (
QuadCLT.PREFIX_AUX,
PROPERTIES,
EYESIS_CORRECTIONS_AUX,
CORRECTION_PARAMETERS.getAux());
if (DEBUG_LEVEL > 0){
System.out.println("Created new QuadCLT instance, will need to read CLT kernels for aux camera");
}
}
@SuppressWarnings("unused")
QuadCLT dbg_QUAD_CLT = QUAD_CLT;
@SuppressWarnings("unused")
QuadCLT dbg_QUAD_CLT_AUX = QUAD_CLT_AUX;
ImagePlus imp_sel = WindowManager.getCurrentImage();
if (imp_sel==null){
IJ.showMessage("Error","There are no images open\nProcess canceled");
return false;
}
ImageStack stack_sel = imp_sel.getStack();
String [] labels = stack_sel.getSliceLabels();
int indx = 0;
for (int i = 0; i < labels.length; i++) {
if (labels[i] !=null) indx++;
}
boolean just2 = (indx == 2) && (labels[0] != null) && (labels[1] != null);
String [] choices = {"---", "disparity","strength"};
double min_strength = 0.18; // TODO: replace these hardcoded defaults with configurable parameters
boolean use_wnd = true;
boolean split_fg_bg = true;
double split_fbg_rms = 0.2; // split small source samples to FG/BG if the aux tile RMS exceeds this value
GenericDialog gd = new GenericDialog("Select disparity and strength slices");
indx = 0;
for (int i = 0; i < labels.length; i++) {
if (labels[i] !=null) {
gd.addChoice(((indx++) + 1)+": " +labels[i], choices, choices[just2 ? (i + 1) : 0]);
}
}
gd.addMessage("--- main-to-aux depth map parameters ---");
gd.addNumericField("Minimal EO correlation strength", min_strength, 3, 6, "");
gd.addCheckbox("Use window for AUX tiles to reduce weight of the hi-res tiles near low-res tile boundaries" , use_wnd);
gd.addCheckbox("Split FG and BG if hi-res disparity varies for the same low-res tile", split_fg_bg);
gd.addNumericField("Aux disparity thershold to split FG and BG", split_fbg_rms, 3, 6, "");
WindowTools.addScrollBars(gd);
gd.showDialog();
if (gd.wasCanceled()) return false;
int [] selections = new int[indx];
indx = 0;
for (int i = 0; i < selections.length; i++) {
selections[i] = gd.getNextChoiceIndex();
}
min_strength = gd.getNextNumber();
use_wnd = gd.getNextBoolean();
split_fg_bg = gd.getNextBoolean();
split_fbg_rms = gd.getNextNumber();
int index_disparity = -1, index_strength=-1;
indx = 0;
for (int i = 0; i < labels.length; i++) if (labels[i] != null) {
if ((index_disparity < 0) && (selections[indx] == 1)) {
index_disparity = i;
}
if ((index_strength < 0) && (selections[indx] == 2)) {
index_strength = i;
}
if ((index_disparity >= 0 ) &&(index_strength >= 0 )) {
break;
}
indx++;
}
int width = imp_sel.getWidth();
int height = imp_sel.getHeight();
String title = imp_sel.getTitle()+"-DS";
float [][] f_ds = new float [2][];
f_ds[0] = (float[]) stack_sel.getPixels(index_disparity+1);
f_ds[1] = (float[]) stack_sel.getPixels(index_strength+1);
double [][] ds = new double [2][width*height];
for (int l = 0; l < ds.length; l++) {
for (int i = 0; i < ds[l].length; i++) {
ds[l][i] = f_ds[l][i];
}
}
String [] titles = {"disparity","strength"};
(new ShowDoubleFloatArrays()) .showArrays(ds, width, height, true, title, titles);
int tile_size = CLT_PARAMETERS.transform_size;
int [] wh_aux = QUAD_CLT_AUX.getGeometryCorrection().getSensorWH();
int tilesX_aux = wh_aux[0] / tile_size;
int tilesY_aux = wh_aux[1] / tile_size;
// int num_slices = split_fg_bg? 7:2;
String [] fgbg_titles = {"disparity","strength", "rms","rms-split","fg-disp","fg-str","bg-disp","bg-str"};
String [] rslt_titles = split_fg_bg ? fgbg_titles :titles;
double [][] ds_aux = DepthMapMainToAux(
ds, // double [][] ds,
QUAD_CLT.getGeometryCorrection(), // GeometryCorrection geometryCorrection_main,
QUAD_CLT_AUX.getGeometryCorrection(), // GeometryCorrection geometryCorrection_aux,
CLT_PARAMETERS,
min_strength, // double min_strength,
use_wnd,
split_fg_bg,
split_fbg_rms,
DEBUG_LEVEL); // int debug_level
(new ShowDoubleFloatArrays()).showArrays(ds_aux, tilesX_aux, tilesY_aux, true, title+"_TOAUX", rslt_titles);
return true;
}
public double [][] DepthMapMainToAux(
double [][] ds,
GeometryCorrection geometryCorrection_main,
GeometryCorrection geometryCorrection_aux,
CLTParameters clt_Parameters,
double min_strength,
boolean use_wnd,
boolean split_fg_bg,
double split_fbg_rms,
int debug_level
){
class DS{
double disparity; // gt disparity
double strength; // gt strength
int tx; // gt tile x
int ty; // gt tile y
double fx; // fractional aux tile X (0.0..1.0) for optional window
double fy; // fractional aux tile Y (0.0..1.0) for optional window
// DS (double disparity, double strength){
// this.disparity = disparity;
// this.strength = strength;
// }
DS (double disparity, double strength, int tx, int ty, double fx, double fy){
this.disparity = disparity;
this.strength = strength;
this.tx = tx;
this.ty = ty;
this.fx = fx;
this.fy = fy;
}
@Override
public String toString() {
return String.format("Disparity (str) = % 6f (%5f), tx=%d ty=%d fx=%5f fy=%5f\n", disparity, strength,tx,ty,fx,fy);
}
}
int tile_size = clt_Parameters.transform_size;
int [] wh_main = geometryCorrection_main.getSensorWH();
int [] wh_aux = geometryCorrection_aux.getSensorWH();
int tilesX_main = wh_main[0] / tile_size;
int tilesY_main = wh_main[1] / tile_size;
int tilesX_aux = wh_aux[0] / tile_size;
int tilesY_aux = wh_aux[1] / tile_size;
ArrayList<ArrayList<DS>> ds_list = new ArrayList<ArrayList<DS>>();
for (int nt = 0; nt < tilesX_aux * tilesY_aux; nt++) {
ds_list.add(new ArrayList<DS>());
}
for (int ty = 0; ty < tilesY_main; ty++) {
double centerY = ty * tile_size + tile_size/2;
for (int tx = 0; tx < tilesX_main; tx++) {
int nt = ty*tilesX_main + tx;
double centerX = tx * tile_size + tile_size/2;
double disparity = ds[0][nt];
double strength = ds[1][nt];
if ((strength >= min_strength) && !Double.isNaN(disparity)) {
double [] dpxpy_aux = geometryCorrection_aux.getFromOther(
geometryCorrection_main, // GeometryCorrection other_gc,
centerX, // double other_px,
centerY, // double other_py,
disparity); // double other_disparity)
double fx = dpxpy_aux[1]/tile_size;
double fy = dpxpy_aux[2]/tile_size;
int tx_aux = (int) Math.floor(fx);
int ty_aux = (int) Math.floor(fy);
fx -= tx_aux;
fy -= ty_aux;
if ((ty_aux >= 0) && (ty_aux < tilesY_aux) && (tx_aux >= 0) && (tx_aux < tilesX_aux)) {
int nt_aux = ty_aux * tilesX_aux + tx_aux;
ds_list.get(nt_aux).add(new DS(dpxpy_aux[0], strength, tx, ty, fx, fy));
}
}
}
}
// simple average (ignoring below minimal)
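// Per aux (LWIR) tile: slice 0 gets the strength-weighted mean disparity of the mapped EO samples,
// slice 1 the average sample weight; with split_fg_bg the weighted disparity RMS (2), the best-split
// residual (3) and the FG/BG disparity/strength pairs (4..7) are filled below.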
int num_slices = split_fg_bg? 8:2;
double [][] ds_aux_avg = new double [num_slices][tilesX_aux * tilesY_aux];
for (int ty = 0; ty < tilesY_aux; ty++) {
for (int tx = 0; tx < tilesX_aux; tx++) {
if ((ty == 4) && (tx == 12)) {
System.out.println("tx = "+tx+", ty = "+ty);
}
int nt = ty * tilesX_aux + tx;
ds_aux_avg[0][nt] = Double.NaN;
ds_aux_avg[1][nt] = 0.0;
if(ds_list.get(nt).isEmpty()) continue;
Collections.sort(ds_list.get(nt), new Comparator<DS>() {
@Override
public int compare(DS lhs, DS rhs) {
return rhs.disparity > lhs.disparity ? -1 : (rhs.disparity < lhs.disparity ) ? 1 : 0;
}
});
double sw = 0.0, swd = 0.0, swd2 = 0.0;
for (DS dsi: ds_list.get(nt)) {
double w = dsi.strength;
if (use_wnd) {
w *= Math.sin(Math.PI * (dsi.fx + 0.5/tile_size)) * Math.sin(Math.PI * (dsi.fy + 0.5/tile_size));
}
sw += w;
double wd = w * dsi.disparity;
swd += wd;
swd2 += wd * dsi.disparity;
}
ds_aux_avg[0][nt] = swd/sw;
ds_aux_avg[1][nt] = sw/ds_list.get(nt).size();
if (split_fg_bg) {
ds_aux_avg[2][nt] = Math.sqrt( (swd2 * sw - swd * swd) / (sw * sw));
ds_aux_avg[3][nt] = ds_aux_avg[2][nt]; // rms
ds_aux_avg[4][nt] = ds_aux_avg[0][nt]; // fg disp
ds_aux_avg[5][nt] = ds_aux_avg[1][nt]; // fg strength
ds_aux_avg[6][nt] = ds_aux_avg[0][nt]; // bg disp
ds_aux_avg[7][nt] = ds_aux_avg[1][nt]; // bg strength
if (ds_aux_avg[2][nt] >= split_fbg_rms) {
// splitting while minimizing sum of 2 squared errors
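// The samples are already sorted by disparity, so candidate split n puts samples 0..n into BG and
// n+1..end into FG; the loop below accumulates the weighted squared error of both groups for every
// split position, and the position with the smallest combined error is selected afterwards.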
double [][] swfb = new double [2][ds_list.get(nt).size() -1];
double [][] swdfb = new double [2][ds_list.get(nt).size() -1];
double [] s2fb = new double [ds_list.get(nt).size() -1];
for (int n = 0; n < s2fb.length; n++) { // split position
double [] s2 = new double[2];
for (int i = 0; i <= s2fb.length; i++) {
int fg = (i > n)? 1 : 0; // 0 - bg, 1 - fg
DS dsi = ds_list.get(nt).get(i);
double w = dsi.strength;
if (use_wnd) {
w *= Math.sin(Math.PI * dsi.fx) * Math.sin(Math.PI * dsi.fy);
}
swfb[fg][n] += w;
double wd = w * dsi.disparity;
swdfb[fg][n] += wd;
s2[fg] += wd * dsi.disparity;
}
s2fb[n] = ((s2[0] * swfb[0][n] - swdfb[0][n] * swdfb[0][n]) / swfb[0][n] +
(s2[1] * swfb[1][n] - swdfb[1][n] * swdfb[1][n]) / swfb[1][n]) / (swfb[0][n] + swfb[1][n]);
}
// now find the n with lowest s2fb and use it to split fg/bg. Could be done in a single pass, but with saved arrays
// it is easier to verify
int nsplit = 0;
for (int i = 1; i < s2fb.length; i++) if (s2fb[i] < s2fb[nsplit]) {
nsplit = i;
}
ds_aux_avg[3][nt] = s2fb[nsplit]; // rms split
ds_aux_avg[4][nt] = swdfb[1][nsplit] / swfb[1][nsplit] ; // fg disp
ds_aux_avg[5][nt] = swfb[1][nsplit]/ (s2fb.length - nsplit) ; // fg strength
ds_aux_avg[6][nt] = swdfb[0][nsplit] / swfb[0][nsplit] ; // bg disp
ds_aux_avg[7][nt] = swfb[0][nsplit]/ (nsplit + 1) ; // bg strength
}
}
}
}
return ds_aux_avg;
}
//getGeometryCorrection
///data_ssd/lwir3d/results/saved/1562390490_233403/v11/
/* ======================================================================== */
public String getSaveCongigPath() {
String configPath=null;
......@@ -9879,8 +10179,8 @@ G= Y +Pr*(- 2*Kr*(1-Kr))/Kg + Pb*(-2*Kb*(1-Kb))/Kg
gd.addNumericField("JPEG scale (%)", 100* processParameters.JPEG_scale,0);
gd.addCheckbox ("Save current settings with results", processParameters.saveSettings);
gd.addCheckbox ("Update ImageJ status", UPDATE_STATUS);
WindowTools.addScrollBars(gd);
gd.addNumericField("Debug Level:", MASTER_DEBUG_LEVEL, 0);
WindowTools.addScrollBars(gd);
gd.showDialog();
if (gd.wasCanceled()) return false;
processParameters.eyesisMode= gd.getNextBoolean();
......
......@@ -34,21 +34,23 @@ import ij.IJ;
*/
public class GeometryCorrection {
static final double FOCAL_LENGTH = 4.5; // nominal focal length - used as default and to convert editable parameters to pixels
static final double DISTORTION_RADIUS = 2.8512; // nominal distortion radius - half width of the sensor
static final double PIXEL_SIZE = 2.2; //um
// static final double FOCAL_LENGTH = 4.5; // nominal focal length - used as default and to convert editable parameters to pixels
// static final double DISTORTION_RADIUS = 2.8512; // nominal distortion radius - half width of the sensor
// static final double PIXEL_SIZE = 2.2; //um
static final String[] RIG_PAR_NAMES = {"azimuth", "tilt", "roll", "zoom", "angle", "baseline"};
public static String RIG_PREFIX = "rig-";
static double SCENE_UNITS_SCALE = 0.001;
static double SCENE_UNITS_SCALE = 0.001; // meters from mm
static String SCENE_UNITS_NAME = "m";
static final String [] CORR_NAMES = {"tilt0","tilt1","tilt2","azimuth0","azimuth1","azimuth2","roll0","roll1","roll2","roll3","zoom0","zoom1","zoom2"};
public int debugLevel = 0;
public int pixelCorrectionWidth=2592; // virtual camera center is at (pixelCorrectionWidth/2, pixelCorrectionHeight/2)
public int pixelCorrectionHeight=1936;
public double focalLength=FOCAL_LENGTH;
public double pixelSize= PIXEL_SIZE; //um
public double distortionRadius= DISTORTION_RADIUS; // mm - half width of the sensor
public double focalLength; // =FOCAL_LENGTH;
public double pixelSize; // = PIXEL_SIZE; //um
public double distortionRadius; // = DISTORTION_RADIUS; // mm - half width of the sensor
public double distortionA8=0.0; //r^8 (normalized to focal length or to sensor half width?)
public double distortionA7=0.0; //r^7 (normalized to focal length or to sensor half width?)
public double distortionA6=0.0; //r^6 (normalized to focal length or to sensor half width?)
......@@ -219,6 +221,18 @@ public class GeometryCorrection {
return extrinsic_corr;
}
public void setCorrVector(double [] dv){
setCorrVector(new CorrVector(dv));
}
public void setCorrVector(int indx, double d){
if (getCorrVector().toArray() == null) {
resetCorrVector();
}
getCorrVector().toArray()[indx] = d;
}
public void setCorrVector(CorrVector vector){
if (vector == null){
vector = new CorrVector();
......@@ -226,6 +240,11 @@ public class GeometryCorrection {
extrinsic_corr = vector;
}
public void resetCorrVector(){
extrinsic_corr = new CorrVector();
}
public boolean [] getParMask(
// boolean disparity_only,
// boolean use_disparity,
......@@ -287,12 +306,12 @@ public class GeometryCorrection {
public RigOffset () {
System.out.println("created RigOffset");
par_scales = new double [VECTOR_LENGTH];
par_scales[AUX_AZIMUTH_INDEX] = 1000.0*FOCAL_LENGTH/PIXEL_SIZE;
par_scales[AUX_TILT_INDEX] = 1000.0*FOCAL_LENGTH/PIXEL_SIZE;
par_scales[AUX_ROLL_INDEX] = 1000.0*DISTORTION_RADIUS/PIXEL_SIZE;
par_scales[AUX_ZOOM_INDEX] = 1000.0*DISTORTION_RADIUS/PIXEL_SIZE;
par_scales[AUX_AZIMUTH_INDEX] = 1000.0*focalLength/pixelSize;
par_scales[AUX_TILT_INDEX] = 1000.0*focalLength/pixelSize;
par_scales[AUX_ROLL_INDEX] = 1000.0*distortionRadius/pixelSize;
par_scales[AUX_ZOOM_INDEX] = 1000.0*distortionRadius/pixelSize;
par_scales[AUX_ANGLE_INDEX] = 1.0; // 1000.0*BASELINE/pixelSize;
par_scales[AUX_BASELINE_INDEX] = 1.0/DISTORTION_RADIUS; // pixels per disparity pixel
par_scales[AUX_BASELINE_INDEX] = 1.0/distortionRadius; // pixels per disparity pixel
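// these scales now follow the per-instance focalLength/pixelSize/distortionRadius (set from the
// sensor calibration in QuadCLT.initGeometryCorrection()) instead of the former static EO-only
// constants, so the aux (LWIR) rig is scaled with its own optics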
}
......@@ -769,11 +788,6 @@ public class GeometryCorrection {
public Matrix getRotMatrix()
{
// Matrix [] rots = new Matrix [4];
// double [] azimuths = getAzimuths();
// double [] tilts = getTilts();
// double [] rolls = getFullRolls();
// double [] zooms = getZooms();
double ca = Math.cos(aux_azimuth);
double sa = Math.sin(aux_azimuth);
double ct = Math.cos(aux_tilt);
......@@ -1185,6 +1199,8 @@ public class GeometryCorrection {
throw new IllegalArgumentException("vector.length = "+vector.length+" != "+LENGTH);
}
this.vector = vector;
} else {
this.vector = new double[LENGTH];
}
}
/**
......@@ -1932,8 +1948,103 @@ matrix([[-0.125, -0.125, 0.125, 0.125, -0.125, 0.125, -0. , -0. , -0.
{
return ( 0.001 * this.pixelSize) / this.focalLength;
}
// get rotation matrix of the composite camera
public Matrix getCommonRotMatrix() {
double heading_rad = Math.PI / 180.0 * heading;
double elevation_rad = Math.PI / 180.0 * elevation;
double roll_rad = Math.PI / 180.0 * common_roll;
double ca = Math.cos(heading_rad);
double sa = Math.sin(heading_rad);
double ct = Math.cos(elevation_rad);
double st = Math.sin(elevation_rad);
double cr = Math.cos(roll_rad);
double sr = Math.sin(roll_rad);
double [][] a_az = { // inverted - OK
{ ca, 0.0, -sa },
{ 0.0, 1.0, 0.0},
{ sa, 0.0, ca}};
double [][] a_t = { // inverted - OK
{ 1.0, 0.0, 0.0},
{ 0.0, ct, st},
{ 0.0, -st , ct}};
double [][] a_r = { // inverted OK
{ cr, sr, 0.0},
{ -sr, cr, 0.0},
{ 0.0, 0.0, 1.0}};
Matrix rot = (new Matrix(a_r).times(new Matrix(a_t).times(new Matrix(a_az))));
return rot;
}
public Matrix getCommonTranslateMatrix() {
// * SCENE_UNITS_SCALE to get meters from mm
double [][] a_translate= {
{common_right * SCENE_UNITS_SCALE},
{common_height * SCENE_UNITS_SCALE},
{common_forward * SCENE_UNITS_SCALE}};
return new Matrix(a_translate);
}
/**
* Get true real world coordinates from pixel coordinates and nominal disparity.
* Unlike getWorldCoordinates(), whose WCS is oriented with the composite camera
* (this.elevation, this.heading, this.common_roll) and offset by this.common_right,
* this.common_height and this.common_forward, this method compensates for that orientation and offset.
* @param px horizontal pixel coordinate (right)
* @param py vertical pixel coordinate (down)
* @param disparity nominal disparity (pixels)
* @return {x, y, z} in meters
*/
public double [] getTrueWorldCoordinates(
double px,
double py,
double disparity)
{
double [] wc = getWorldCoordinates(px,py,disparity,true);
double [][] a_wc = {{wc[0]}, {wc[1]},{wc[2]}};
Matrix xyz = new Matrix(a_wc);
Matrix rwc = (getCommonRotMatrix().times(xyz)).plus(getCommonTranslateMatrix());
return rwc.getColumnPackedCopy();
}
/**
* Get pixel disparity and coordinates from the real world coordinates (in meters).
* Unlike getImageCoordinates(), whose WCS is oriented with the composite camera
* (this.elevation, this.heading, this.common_roll) and offset by this.common_right,
* this.common_height and this.common_forward, this method compensates for that orientation and offset.
* @param xyz real world coordinates {x, y, z} in meters (right up, towards camera)
* @return {disparity, px, py} (right, down)
*/
public double [] getTrueImageCoordinates(
double [] xyz) // correct distortion (will need corrected background too !)
{
double [][] a_xyz = {{xyz[0]}, {xyz[1]},{xyz[2]}};
Matrix m_xyz = getCommonRotMatrix().transpose().times(((new Matrix (a_xyz)).minus(getCommonTranslateMatrix())));
return getImageCoordinates(m_xyz.getColumnPackedCopy(), true);
}
/**
* Get pixel disparity and coordinates from the other GeometryCorrection px, py,and disparity
* @param other_gc other (source) GeometryCorrection instance
* @param other_px horizontal pixel coordinate (right) in other_gc
* @param other_py vertical pixel coordinate (down) in other_gc
* @param other_disparity nominal disparity (pixels) in other_gc
* @return {disparity, px, py} (right, down) for this GeometryCorrection
*/
public double [] getFromOther(
GeometryCorrection other_gc,
double other_px,
double other_py,
double other_disparity)
{
double [] true_world_xyz = other_gc.getTrueWorldCoordinates(
other_px,
other_py,
other_disparity);
return getTrueImageCoordinates(true_world_xyz);
}
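// Illustration only (not part of this commit): a hypothetical helper showing how the methods above
// chain to re-project a main (EO) camera measurement into this (aux / LWIR) camera. The name
// eoToLwir and its arguments are assumptions made for the example.
static double [] eoToLwir(GeometryCorrection gc_main, GeometryCorrection gc_aux,
        double px, double py, double disparity) {
    // EO pixel + nominal disparity -> world XYZ, compensating the EO rig orientation and offset
    double [] xyz = gc_main.getTrueWorldCoordinates(px, py, disparity);
    // world XYZ -> {disparity, px, py} in the aux camera; same result as
    // gc_aux.getFromOther(gc_main, px, py, disparity)
    return gc_aux.getTrueImageCoordinates(xyz);
}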
/**
* Get real world coordinates from pixel coordinates and nominal disparity
* @param px horizontal pixel coordinate (right)
......@@ -1961,6 +2072,32 @@ matrix([[-0.125, -0.125, 0.125, 0.125, -0.125, 0.125, -0. , -0. , -0.
return xyz;
}
/**
* Get pixel disparity and coordinates from the real world coordinates (in meters)
* @param xyz real world coordinates {x, y, z} in meters (right up, towards camera)
* @param correctDistortions true: correct lens distortions, false - no lens distortions
* @return {disparity, px, py} (right, down)
*/
public double [] getImageCoordinates(
double [] xyz,
boolean correctDistortions) // correct distortion (will need corrected background too !)
{
double x = xyz[0];
double y = xyz[1];
double z = xyz[2];
double disparity = -SCENE_UNITS_SCALE * this.focalLength * this.disparityRadius / (z * 0.001*this.pixelSize);
// non-distorted coordinates relative to the (0.5 * this.pixelCorrectionWidth, 0.5 * this.pixelCorrectionHeight)in mm
double pXc = x * disparity / (SCENE_UNITS_SCALE * this.disparityRadius); // pixels
double pYc =-y * disparity / (SCENE_UNITS_SCALE * this.disparityRadius); // pixels
double rND = Math.sqrt(pXc*pXc + pYc*pYc)*0.001*this.pixelSize; // mm
double rD2RND = correctDistortions?getRDistByR(rND/this.distortionRadius):1.0;
double px = pXc * rD2RND + 0.5 * this.pixelCorrectionWidth; // distorted coordinates relative to the (0.5 * this.pixelCorrectionWidth, 0.5 * this.pixelCorrectionHeight)
double py = pYc * rD2RND + 0.5 * this.pixelCorrectionHeight; // in pixels
double [] dxy = {disparity, px, py};
return dxy;
}
/**
* Find disparity for the intersection of the view ray (px, py) and a real-world plane orthogonal through the end of the
* vector norm_xyz
......@@ -2084,31 +2221,6 @@ matrix([[-0.125, -0.125, 0.125, 0.125, -0.125, 0.125, -0. , -0. , -0.
return jacobian; // xyz;
}
/**
* Get pixel disparity and coordinates from the real world coordinates (in meters)
* @param xyz real world coordinates {x, y, z} in meters (right up, towards camera)
* @param correctDistortions true: correct lens distortions, false - no lens distortions
* @return {disparity, px, py} (right, down)
*/
public double [] getImageCoordinates(
double [] xyz,
boolean correctDistortions) // correct distortion (will need corrected background too !)
{
double x = xyz[0];
double y = xyz[1];
double z = xyz[2];
double disparity = -SCENE_UNITS_SCALE * this.focalLength * this.disparityRadius / (z * 0.001*this.pixelSize);
// non-distorted coordinates relative to the (0.5 * this.pixelCorrectionWidth, 0.5 * this.pixelCorrectionHeight)in mm
double pXc = x * disparity / (SCENE_UNITS_SCALE * this.disparityRadius); // pixels
double pYc =-y * disparity / (SCENE_UNITS_SCALE * this.disparityRadius); // pixels
double rND = Math.sqrt(pXc*pXc + pYc*pYc)*0.001*this.pixelSize; // mm
double rD2RND = correctDistortions?getRDistByR(rND/this.distortionRadius):1.0;
double px = pXc * rD2RND + 0.5 * this.pixelCorrectionWidth; // distorted coordinates relative to the (0.5 * this.pixelCorrectionWidth, 0.5 * this.pixelCorrectionHeight)
double py = pYc * rD2RND + 0.5 * this.pixelCorrectionHeight; // in pixels
double [] dxy = {disparity, px, py};
return dxy;
}
/* Just for testing using delta instead of d */
public double [][] getImageJacobian(
double [] xyz0,
......
......@@ -224,6 +224,11 @@ public class ImageDtt {
public boolean isMonochrome() {
return monochrome;
}
// the aux (LWIR) camera is currently the only monochrome one, so reuse the monochrome flag as a proxy; maybe change in the future
public boolean isAux() {
return monochrome;
}
public double [][][][] mdctStack(
final ImageStack imageStack,
......@@ -1615,16 +1620,16 @@ public class ImageDtt {
final double [] enh_ortho_scale = new double [corr_size];
for (int i = 0; i < corr_size; i++){
if ((i < (transform_size - imgdtt_params.enhortho_width)) || (i > (transform_size - 2 + imgdtt_params.enhortho_width))) {
if ((i < (transform_size - imgdtt_params.getEnhOrthoWidth(isAux()))) || (i > (transform_size - 2 + imgdtt_params.getEnhOrthoWidth(isAux())))) {
enh_ortho_scale[i] = 1.0;
} else {
enh_ortho_scale[i] = imgdtt_params.enhortho_scale;
enh_ortho_scale[i] = imgdtt_params.getEnhOrthoScale(isAux());
}
if (i == (transform_size-1)) enh_ortho_scale[i] = 0.0 ; // hardwired 0 in the center
enh_ortho_scale[i] *= Math.sin(Math.PI*(i+1.0)/(2*transform_size));
}
if (globalDebugLevel > 1){
System.out.println("enhortho_width="+ imgdtt_params.enhortho_width+" enhortho_scale="+ imgdtt_params.enhortho_scale);
System.out.println("getEnhOrthoWidth(isAux())="+ imgdtt_params.getEnhOrthoWidth(isAux())+" getEnhOrthoScale(isAux())="+ imgdtt_params.getEnhOrthoScale(isAux()));
for (int i = 0; i < corr_size; i++){
System.out.println(" enh_ortho_scale["+i+"]="+ enh_ortho_scale[i]);
......@@ -1782,8 +1787,8 @@ public class ImageDtt {
isMonochrome(), // boolean monochrome,
(globalDebugLevel > -1)); // boolean debug)
corr2d.createOrtoNotch(
imgdtt_params.enhortho_width, // double enhortho_width,
imgdtt_params.enhortho_scale, //double enhortho_scale,
imgdtt_params.getEnhOrthoWidth(isAux()), // double getEnhOrthoWidth(isAux()),
imgdtt_params.getEnhOrthoScale(isAux()), //double getEnhOrthoScale(isAux()),
(imgdtt_params.lma_debug_level > 1)); // boolean debug);
for (int nTile = ai.getAndIncrement(); nTile < nTilesInChn; nTile = ai.getAndIncrement()) {
......@@ -6192,20 +6197,20 @@ public class ImageDtt {
}
// reducing weight of on-axis correlation values to enhance detection of vertical/horizontal lines
// multiply correlation results inside the horizontal center strip 2*enhortho_width - 1 wide by enhortho_scale
// multiply correlation results inside the horizontal center strip 2*getEnhOrthoWidth(isAux()) - 1 wide by getEnhOrthoScale(isAux())
final double [] enh_ortho_scale = new double [corr_size];
for (int i = 0; i < corr_size; i++){
if ((i < (transform_size - imgdtt_params.enhortho_width)) || (i > (transform_size - 2 + imgdtt_params.enhortho_width))) {
if ((i < (transform_size - imgdtt_params.getEnhOrthoWidth(isAux()))) || (i > (transform_size - 2 + imgdtt_params.getEnhOrthoWidth(isAux())))) {
enh_ortho_scale[i] = 1.0;
} else {
enh_ortho_scale[i] = imgdtt_params.enhortho_scale;
enh_ortho_scale[i] = imgdtt_params.getEnhOrthoScale(isAux());
}
if (i == (transform_size-1)) enh_ortho_scale[i] = 0.0 ; // hardwired 0 in the center
enh_ortho_scale[i] *= Math.sin(Math.PI*(i+1.0)/(2*transform_size));
}
if (globalDebugLevel > 1){
System.out.println("enhortho_width="+ imgdtt_params.enhortho_width+" enhortho_scale="+ imgdtt_params.enhortho_scale);
System.out.println("getEnhOrthoWidth(isAux())="+ imgdtt_params.getEnhOrthoWidth(isAux())+" getEnhOrthoScale(isAux())="+ imgdtt_params.getEnhOrthoScale(isAux()));
for (int i = 0; i < corr_size; i++){
System.out.println(" enh_ortho_scale["+i+"]="+ enh_ortho_scale[i]);
......@@ -6383,8 +6388,8 @@ public class ImageDtt {
(globalDebugLevel > -1)); // boolean debug)
corr2d.createOrtoNotch(
imgdtt_params.enhortho_width, // double enhortho_width,
imgdtt_params.enhortho_scale, //double enhortho_scale,
imgdtt_params.getEnhOrthoWidth(isAux()), // double enhortho_width,
imgdtt_params.getEnhOrthoScale(isAux()), //double enhortho_scale,
false); // true); // boolean debug);
// public int enhortho_width = 2; // reduce weight of center correlation pixels from center (0 - none, 1 - center, 2 +/-1 from center)
// public double enhortho_scale = 0.0; // 0.2; // multiply center correlation pixels (inside enhortho_width)
......
......@@ -45,8 +45,10 @@ public class ImageDttParameters {
public int ortho_nsamples = 5; // number of samples to fit parabola
public double ortho_vasw_pwr = 2.0; // use data as weights when fitting parabola (high value samples are more important (when false use 3 samples only)
public int enhortho_width = 2; // reduce weight of center correlation pixels from center (0 - none, 1 - center, 2 +/-1 from center)
public double enhortho_scale = 0.0; // 0.2; // multiply center correlation pixels (inside enhortho_width)
private int enhortho_width = 2; // reduce weight of center correlation pixels from center (0 - none, 1 - center, 2 +/-1 from center)
private int enhortho_width_aux = 1; // reduce weight of center correlation pixels from center (0 - none, 1 - center, 2 +/-1 from center)
private double enhortho_scale = 0.0; // 0.2; // multiply center correlation pixels (inside enhortho_width)
private double enhortho_scale_aux = 0.0; // 0.2; // multiply center correlation pixels (inside enhortho_width)
public boolean ly_poly = false; // Use polynomial when measuring mismatch (false - use center of mass)
public double ly_crazy_poly = 1.0; // Maximal allowed mismatch difference calculated as polynomial maximum
public boolean ly_poly_backup = true; // Use CM offset measuremets if poly failed
......@@ -111,6 +113,22 @@ public class ImageDttParameters {
public boolean corr_var_cam = true; // New correlation mode compatible with 8 subcameras
public double cm_max_normalization = 0.55; // fraction of correlation maximum radius, being squared multiplied by maximum to have the same total mass
public int getEnhOrthoWidth(boolean aux) {
return aux ? enhortho_width_aux : enhortho_width;
}
public double getEnhOrthoScale(boolean aux) {
return aux ? enhortho_scale_aux : enhortho_scale;
}
// next 2 only used to read old config files
public void setEnhOrthoWidth (int w) {
enhortho_width = w;
}
public void setEnhOrthoScale (double s) {
enhortho_scale = s;
}
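// Read path after this change (illustrative comment, not part of the commit):
// main (EO) camera tiles use enhortho_width / enhortho_scale via getEnhOrthoWidth(false) / getEnhOrthoScale(false);
// aux (LWIR) camera tiles use enhortho_width_aux / enhortho_scale_aux via getEnhOrthoWidth(true) / getEnhOrthoScale(true);
// old single-camera config files are loaded through setEnhOrthoWidth()/setEnhOrthoScale(), affecting the main values only.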
public void dialogQuestions(GenericJTabbedDialog gd) {
gd.addCheckbox ("Enable ImageDtt correlation debug layers", this.corr_mode_debug,
......@@ -135,8 +153,10 @@ public class ImageDttParameters {
gd.addNumericField("Use data as weights when fitting parabola for ortho mode", this.ortho_vasw_pwr,3,6,"",
"Raise value to this power and apply as weight. Reduce width to 3 samples if false, 5 OK when true");
gd.addNumericField("Reduce weight of center correlation pixels from center (0 - none, 1 - center, 2 +/-1 from center)", this.enhortho_width, 0);
gd.addNumericField("Multiply center correlation pixels (inside enhortho_width) (1.0 - disables enh_ortho)", this.enhortho_scale, 3);
gd.addNumericField("Reduce weight of center correlation pixels from center (0 - none, 1 - center, 2 +/-1 from center) - main camera", this.enhortho_width, 0);
gd.addNumericField("Reduce weight of center correlation pixels from center (0 - none, 1 - center, 2 +/-1 from center) - aux camera", this.enhortho_width_aux, 0);
gd.addNumericField("Multiply center correlation pixels (inside enhortho_width) (1.0 - disables enh_ortho) - main camera", this.enhortho_scale, 3);
gd.addNumericField("Multiply center correlation pixels (inside enhortho_width) (1.0 - disables enh_ortho) - aux camera", this.enhortho_scale_aux, 3);
gd.addCheckbox ("Use polynomial when measuring mismatch (false - use center of mass)", this.ly_poly);
gd.addNumericField("Maximal allowed mismatch difference calculated as polynomial maximum", this.ly_crazy_poly,3,6,"px",
......@@ -276,7 +296,9 @@ public class ImageDttParameters {
this.ortho_vasw_pwr = gd.getNextNumber();
this.enhortho_width= (int) gd.getNextNumber();
this.enhortho_width_aux=(int)gd.getNextNumber();
this.enhortho_scale= gd.getNextNumber();
this.enhortho_scale_aux= gd.getNextNumber();
this.ly_poly = gd.getNextBoolean();
this.ly_crazy_poly= gd.getNextNumber();
......@@ -364,7 +386,9 @@ public class ImageDttParameters {
properties.setProperty(prefix+"ortho_vasw", this.ortho_vasw_pwr+"");
properties.setProperty(prefix+"enhortho_width", this.enhortho_width +"");
properties.setProperty(prefix+"enhortho_width_aux", this.enhortho_width_aux +"");
properties.setProperty(prefix+"enhortho_scale", this.enhortho_scale +"");
properties.setProperty(prefix+"enhortho_scale_aux", this.enhortho_scale_aux +"");
properties.setProperty(prefix+"corr_offset", this.corr_offset +"");
properties.setProperty(prefix+"twice_diagonal", this.twice_diagonal +"");
......@@ -451,7 +475,9 @@ public class ImageDttParameters {
if (properties.getProperty(prefix+"ortho_vasw_pwr")!=null) this.ortho_vasw_pwr=Double.parseDouble(properties.getProperty(prefix+"ortho_vasw_pwr"));
if (properties.getProperty(prefix+"enhortho_width")!=null) this.enhortho_width=Integer.parseInt(properties.getProperty(prefix+"enhortho_width"));
if (properties.getProperty(prefix+"enhortho_width_aux")!=null) this.enhortho_width_aux=Integer.parseInt(properties.getProperty(prefix+"enhortho_width_aux"));
if (properties.getProperty(prefix+"enhortho_scale")!=null) this.enhortho_scale=Double.parseDouble(properties.getProperty(prefix+"enhortho_scale"));
if (properties.getProperty(prefix+"enhortho_scale_aux")!=null) this.enhortho_scale_aux=Double.parseDouble(properties.getProperty(prefix+"enhortho_scale_aux"));
if (properties.getProperty(prefix+"fo_correct")!=null) this.fo_correct=Boolean.parseBoolean(properties.getProperty(prefix+"fo_correct"));
......@@ -543,7 +569,9 @@ public class ImageDttParameters {
idp.ortho_vasw_pwr = this.ortho_vasw_pwr;
idp.enhortho_width = this.enhortho_width;
idp.enhortho_width_aux = this.enhortho_width_aux;
idp.enhortho_scale = this.enhortho_scale;
idp.enhortho_scale_aux = this.enhortho_scale_aux;
idp.ly_poly = this.ly_poly;
idp.ly_crazy_poly = this.ly_crazy_poly;
......
......@@ -77,7 +77,7 @@ public class QuadCLT {
public EyesisCorrectionParameters.CorrectionParameters correctionsParameters=null;
double [][][][][][] clt_kernels = null; // can be used to determine monochrome too?
public GeometryCorrection geometryCorrection = null;
double [] extrinsic_corr = new double [GeometryCorrection.CORR_NAMES.length]; // extrinsic corrections (needed from properties, before geometryCorrection
double [] extrinsic_vect = new double [GeometryCorrection.CORR_NAMES.length]; // extrinsic corrections (needed from properties, before geometryCorrection
public int extra_items = 8; // number of extra items saved with kernels (center offset (partial, full, derivatives)
public ImagePlus eyesisKernelImage = null;
public long startTime; // start of batch processing
......@@ -86,7 +86,7 @@ public class QuadCLT {
public double [][][] fine_corr = new double [4][2][6]; // per port, per x/y, set of 6 coefficient for fine geometric corrections
public TileProcessor tp = null;
public TileProcessor tp = null;
String image_name = null;
double [] gps_lla = null;
......@@ -249,7 +249,7 @@ public class QuadCLT {
}
GeometryCorrection gc = geometryCorrection;
if (gc == null) { // if it was not yet created
gc = new GeometryCorrection(this.extrinsic_corr);
gc = new GeometryCorrection(this.extrinsic_vect);
}
for (int i = 0; i < GeometryCorrection.CORR_NAMES.length; i++){
String name = prefix+"extrinsic_corr_"+GeometryCorrection.CORR_NAMES[i];
......@@ -278,14 +278,14 @@ public class QuadCLT {
}
GeometryCorrection gc = geometryCorrection;
if (gc == null) { // if it was not yet created
gc = new GeometryCorrection(this.extrinsic_corr);
gc = new GeometryCorrection(this.extrinsic_vect);
}
for (int i = 0; i < GeometryCorrection.CORR_NAMES.length; i++){
String other_name = other_prefix+"extrinsic_corr_"+GeometryCorrection.CORR_NAMES[i];
if (other_properties.getProperty(other_name)!=null) {
this.extrinsic_corr[i] = Double.parseDouble(other_properties.getProperty(other_name));
this.extrinsic_vect[i] = Double.parseDouble(other_properties.getProperty(other_name));
if (geometryCorrection != null){
geometryCorrection.getCorrVector().toArray()[i] = this.extrinsic_corr[i];
geometryCorrection.getCorrVector().toArray()[i] = this.extrinsic_vect[i];
}
}
String this_name = this_prefix+"extrinsic_corr_"+GeometryCorrection.CORR_NAMES[i];
......@@ -304,7 +304,7 @@ public class QuadCLT {
public void listGeometryCorrection(boolean full){
GeometryCorrection gc = geometryCorrection;
if (gc == null) { // if it was not yet created
gc = new GeometryCorrection(this.extrinsic_corr);
gc = new GeometryCorrection(this.extrinsic_vect);
}
gc.listGeometryCorrection(full);
}
......@@ -323,11 +323,19 @@ public class QuadCLT {
for (int i = 0; i < GeometryCorrection.CORR_NAMES.length; i++){
String name = prefix+"extrinsic_corr_"+GeometryCorrection.CORR_NAMES[i];
if (properties.getProperty(name)!=null) {
this.extrinsic_corr[i] = Double.parseDouble(properties.getProperty(name));
if (this.extrinsic_vect == null) {
// only create non-null array if there are saved values
this.extrinsic_vect = new double [GeometryCorrection.CORR_NAMES.length];
}
this.extrinsic_vect[i] = Double.parseDouble(properties.getProperty(name));
// System.out.println("getProperties():"+i+": getProperty("+name+") -> "+properties.getProperty(name)+"");
if (geometryCorrection != null){
geometryCorrection.getCorrVector().toArray()[i] = this.extrinsic_corr[i];
// if (geometryCorrection.getCorrVector().toArray() == null) {
// geometryCorrection.resetCorrVector(); // make it array of zeros
// }
// geometryCorrection.getCorrVector().toArray()[i] = this.extrinsic_vect[i];
geometryCorrection.setCorrVector(i,this.extrinsic_vect[i]);
}
}
}
......@@ -335,7 +343,16 @@ public class QuadCLT {
// geometryCorrection.setRigOffsetFromProperies(prefix, properties);
// }
if (geometryCorrection == null) {
geometryCorrection = new GeometryCorrection(this.extrinsic_corr);
double [] extrinsic_vect_saved = this.extrinsic_vect.clone();
boolean OK = initGeometryCorrection(0); // int debugLevel);
if (!OK) {
throw new IllegalArgumentException ("Failed to initialize geometry correction");
}
// Substitute vector generated in initGeometryCorrection with the saved from properties one:
// it also replaces data inside geometryCorrection. TODO: redo to isolate this.extrinsic_vect from geometryCorrection
this.extrinsic_vect = extrinsic_vect_saved;
geometryCorrection.setCorrVector(this.extrinsic_vect);
// geometryCorrection = new GeometryCorrection(this.extrinsic_vect);
}
if (is_aux) {
......@@ -359,12 +376,18 @@ public class QuadCLT {
}
public void resetGeometryCorrection() {
geometryCorrection = null;
extrinsic_corr = new double [GeometryCorrection.CORR_NAMES.length];
// extrinsic_vect = new double [GeometryCorrection.CORR_NAMES.length];
extrinsic_vect = null;
}
public boolean initGeometryCorrection(int debugLevel){
// keep rig offsets if edited
if (geometryCorrection == null) {
geometryCorrection = new GeometryCorrection(extrinsic_corr);
geometryCorrection = new GeometryCorrection(extrinsic_vect);
}
if (eyesisCorrections.pixelMapping == null) {
// need to initialize sensor data
// eyesisCorrections.initSensorFiles(.debugLevel..);
eyesisCorrections.initPixelMapping(debugLevel);
}
PixelMapping.SensorData [] sensors = eyesisCorrections.pixelMapping.sensors;
// verify that all sensors have the same distortion parameters
......@@ -386,12 +409,20 @@ public class QuadCLT {
return false;
}
}
// TODO: Verify correction sign!
double f_avg = geometryCorrection.getCorrVector().setZoomsFromF(
sensors[0].focalLength,
sensors[1].focalLength,
sensors[2].focalLength,
sensors[3].focalLength);
// following parameters are used for scaling extrinsic corrections
geometryCorrection.focalLength = f_avg;
geometryCorrection.pixelSize = sensors[0].pixelSize;
geometryCorrection.distortionRadius = sensors[0].distortionRadius;
for (int i = CorrVector.LENGTH_ANGLES; i < CorrVector.LENGTH; i++){
}
// set common distortion parameters
......@@ -4987,7 +5018,7 @@ public class QuadCLT {
System.out.println("Extrinsic corrections "+name);
if (geometryCorrection == null){
System.out.println("are not set, will be:");
System.out.println(new GeometryCorrection(this.extrinsic_corr).getCorrVector().toString());
System.out.println(new GeometryCorrection(this.extrinsic_vect).getCorrVector().toString());
} else {
System.out.println(geometryCorrection.getCorrVector().toString());
}
......@@ -5002,7 +5033,7 @@ public class QuadCLT {
// GeometryCorrection gc = this.geometryCorrection;
if (this.geometryCorrection == null){
System.out.println("geometryCorrection is not set, creating one");
this.geometryCorrection = new GeometryCorrection(this.extrinsic_corr);
this.geometryCorrection = new GeometryCorrection(this.extrinsic_vect);
}
boolean edited = this.geometryCorrection.editRig();
// if (edited) {
......@@ -5021,9 +5052,10 @@ public class QuadCLT {
public void resetExtrinsicCorr(
CLTParameters clt_parameters)
{
this.extrinsic_corr = new double [GeometryCorrection.CORR_NAMES.length];
// this.extrinsic_vect = new double [GeometryCorrection.CORR_NAMES.length];
this.extrinsic_vect = null;
if (geometryCorrection != null){
geometryCorrection.setCorrVector(null);
geometryCorrection.resetCorrVector();
}
if (clt_parameters.fine_corr_apply){
clt_parameters.fine_corr_ignore = false;
......@@ -6324,7 +6356,7 @@ public class QuadCLT {
false, // final boolean no_weak,
false, // final boolean use_last, //
// TODO: when useCombo - pay attention to borders (disregard)
false, // final boolean usePoly) // use polynomial method to find max), valid if useCombo == false
false, // final boolean useP}oly) // use polynomial method to find max), valid if useCombo == false
true, // final boolean copyDebug)
debugLevel);
......@@ -6425,33 +6457,33 @@ public class QuadCLT {
if (show_init_refine) tp.showScan(
tp.clt_3d_passes.get(refine_pass), // CLTPass3d scan,
"after_measure-"+tp.clt_3d_passes.size());
// if (nnn < (num_macro_refine-1)) {
// if (clt_parameters.combine_refine){
CLTPass3d combo_pass = tp.compositeScan(
tp.clt_3d_passes, // final ArrayList <CLTPass3d> passes,
bg_pass, // final int firstPass,
tp.clt_3d_passes.size(), // final int lastPassPlus1,
// tp.clt_3d_passes.get(bg_pass).getSelected(), // selected , // final boolean [] bg_tiles, // get from selected in clt_3d_passes.get(0);
// clt_parameters.ex_min_over,// final double ex_min_over, // when expanding over previously detected (by error) background, disregard far tiles
tp.getTrustedCorrelation(), // final double trustedCorrelation,
tp.getMaxOverexposure(), // final double max_overexposure,
0.0, // clt_parameters.bgnd_range, // final double disp_far, // limit results to the disparity range
clt_parameters.grow_disp_max, // final double disp_near,
clt_parameters.combine_min_strength, // final double minStrength,
clt_parameters.combine_min_hor, // final double minStrengthHor,
clt_parameters.combine_min_vert, // final double minStrengthVert,
false, // final boolean no_weak,
false, // final boolean use_last, //
// TODO: when useCombo - pay attention to borders (disregard)
false, // final boolean usePoly) // use polynomial method to find max), valid if useCombo == false
true, // final boolean copyDebug)
debugLevel);
// if (nnn < (num_macro_refine-1)) {
// if (clt_parameters.combine_refine){
CLTPass3d combo_pass = tp.compositeScan(
tp.clt_3d_passes, // final ArrayList <CLTPass3d> passes,
bg_pass, // final int firstPass,
tp.clt_3d_passes.size(), // final int lastPassPlus1,
// tp.clt_3d_passes.get(bg_pass).getSelected(), // selected , // final boolean [] bg_tiles, // get from selected in clt_3d_passes.get(0);
// clt_parameters.ex_min_over,// final double ex_min_over, // when expanding over previously detected (by error) background, disregard far tiles
tp.getTrustedCorrelation(), // final double trustedCorrelation,
tp.getMaxOverexposure(), // final double max_overexposure,
0.0, // clt_parameters.bgnd_range, // final double disp_far, // limit results to the disparity range
clt_parameters.grow_disp_max, // final double disp_near,
clt_parameters.combine_min_strength, // final double minStrength,
clt_parameters.combine_min_hor, // final double minStrengthHor,
clt_parameters.combine_min_vert, // final double minStrengthVert,
false, // final boolean no_weak,
false, // final boolean use_last, //
// TODO: when useCombo - pay attention to borders (disregard)
false, // final boolean usePoly) // use polynomial method to find max), valid if useCombo == false
true, // final boolean copyDebug)
debugLevel);
if (show_init_refine) tp.showScan(
combo_pass, // CLTPass3d scan,
"after_compositeScan-"+tp.clt_3d_passes.size());
if (show_init_refine) tp.showScan(
combo_pass, // CLTPass3d scan,
"after_compositeScan-"+tp.clt_3d_passes.size());
tp.clt_3d_passes.add(combo_pass);
tp.clt_3d_passes.add(combo_pass);
}
///// Refining after all added - end
......@@ -7735,12 +7767,12 @@ public class QuadCLT {
// public ImagePlus output3d(
public boolean output3d(
CLTParameters clt_parameters,
ColorProcParameters colorProcParameters,
EyesisCorrectionParameters.RGBParameters rgbParameters,
final int threadsMax, // maximal number of threads to launch
final boolean updateStatus,
final int debugLevel)
CLTParameters clt_parameters,
ColorProcParameters colorProcParameters,
EyesisCorrectionParameters.RGBParameters rgbParameters,
final int threadsMax, // maximal number of threads to launch
final boolean updateStatus,
final int debugLevel)
{
final boolean batch_mode = clt_parameters.batch_run;
this.startStepTime=System.nanoTime();
......@@ -7906,7 +7938,6 @@ public class QuadCLT {
/// CLTPass3d scan =
CLTMeasure( // perform single pass according to prepared tiles operations and disparity
image_data, // first index - number of image in a quad
// saturation_imp, //final boolean [][] saturation_imp, // (near) saturated pixels or null
clt_parameters,
scanIndex,
true, // final boolean save_textures,
......@@ -7917,12 +7948,6 @@ public class QuadCLT {
}
// TEMPORARY EXIT
// if (tp.clt_3d_passes.size() > 0) return null; // just to fool compiler
// int scan_limit = 10;
for (int scanIndex = next_pass; (scanIndex < tp.clt_3d_passes.size()) && (scanIndex < clt_parameters.max_clusters); scanIndex++){ // just temporary limiting
if (debugLevel > -1){
System.out.println("Generating cluster images (limit is set to "+clt_parameters.max_clusters+") largest, scan #"+scanIndex);
......@@ -7940,21 +7965,10 @@ public class QuadCLT {
CLTPass3d scan = tp.clt_3d_passes.get(scanIndex);
/*
if ((scanIndex == 73) ) {
tp.showScan(
tp.clt_3d_passes.get(scanIndex), // CLTPass3d scan,
"SELECTED-"+scanIndex);
}
*/
// TODO: use new updated disparity, for now just what was forced for the picture
double [] scan_disparity = new double [tilesX * tilesY];
int indx = 0;
// boolean [] scan_selected = scan.getSelected();
for (int ty = 0; ty < tilesY; ty ++) for (int tx = 0; tx < tilesX; tx ++){
// scan_selected[indx] = scan.tile_op[ty][tx] != 0;
scan_disparity[indx++] = scan.disparity[ty][tx];
}
if (clt_parameters.avg_cluster_disp){
......@@ -7971,17 +7985,8 @@ public class QuadCLT {
scan_disparity[i] = sdw;
}
}
/*
if ((scanIndex == 73)) {
tp.showScan(
tp.clt_3d_passes.get(scanIndex), // CLTPass3d scan,
"X3D-"+scanIndex);
}
*/
// boolean showTri = ((scanIndex < next_pass + 1) && clt_parameters.show_triangles) ||(scanIndex < 3);
boolean showTri = !batch_mode && (debugLevel > -1) && (((scanIndex < next_pass + 1) && clt_parameters.show_triangles) ||((scanIndex - next_pass) == 73));
// boolean showTri = ((scanIndex < next_pass + 1) && clt_parameters.show_triangles) ||(scanIndex == 49) || (scanIndex == 54);
try {
generateClusterX3d(
x3dOutput,
......@@ -8000,18 +8005,12 @@ public class QuadCLT {
clt_parameters.grow_disp_max, // other_range, // 2.0 'other_range - difference from the specified (*_CM)
clt_parameters.maxDispTriangle);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
return false;
}
}
// now generate and save texture files (start with full, later use bounding rectangle?)
if ((x3d_path != null) && (x3dOutput != null)){
// x3d_path+=Prefs.getFileSeparator()+correctionsParameters.getModelName(this.image_name)+".x3d";
// x3dOutput.generateX3D(x3d_path);
x3dOutput.generateX3D(x3d_path+Prefs.getFileSeparator()+correctionsParameters.getModelName(this.image_name)+".x3d");
}
if (wfOutput != null){
......
......@@ -4835,9 +4835,7 @@ public class TileProcessor {
public CLTPass3d refinePassSetup( // prepare tile tasks for the second pass based on the previous one(s)
// final double [][][] image_data, // first index - number of image in a quad
CLTParameters clt_parameters,
// disparity range - differences from
boolean use_supertiles, // false (2018)
int bg_scan_index, // 0
double disparity_far, // 0.3
......
......@@ -9,7 +9,7 @@ import java.io.IOException;
** Copyright (C) 2017 Elphel, Inc.
**
** -----------------------------------------------------------------------------**
**
**
** WavefrontExport.java is free software: you can redistribute it and/or modify
** it under the terms of the GNU General Public License as published by
** the Free Software Foundation, either version 3 of the License, or
......@@ -27,12 +27,12 @@ import java.io.IOException;
*/
import java.util.ArrayList;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import com.elphel.imagej.cameras.CLTParameters;
import com.elphel.imagej.cameras.EyesisCorrectionParameters;
import com.elphel.imagej.cameras.EyesisCorrectionParameters.CorrectionParameters;
import com.elphel.imagej.tileprocessor.CLTPass3d;
import com.elphel.imagej.tileprocessor.GeometryCorrection;
......@@ -42,8 +42,8 @@ public class WavefrontExport {
static final String MTL_EXT=".mtl";
static final String OBJ_EXT=".obj";
GeometryCorrection geometry_correction;
public ArrayList <CLTPass3d> clt_3d_passes;
public CLTParameters clt_parameters;
public ArrayList <CLTPass3d> clt_3d_passes;
public CLTParameters clt_parameters;
public EyesisCorrectionParameters.CorrectionParameters correctionsParameters;
public int debugLevel = 1;
FileWriter obj_writer; // f0 = new FileWriter("output.txt");
......@@ -62,7 +62,7 @@ public class WavefrontExport {
public WavefrontExport(
String out_dir,
String project_name,
CLTParameters clt_parameters,
CLTParameters clt_parameters,
EyesisCorrectionParameters.CorrectionParameters correctionsParameters,
GeometryCorrection geometry_correction,
ArrayList <CLTPass3d> clt_3d_passes) throws IOException{
......@@ -77,7 +77,7 @@ public class WavefrontExport {
mtl_writer.write("#\n# Wavefront material file\n#\n");
obj_writer.write("#\n# Wavefront object file\n#\n");
obj_writer.write("mtllib ./"+project_name+MTL_EXT+"\n\n"); // add "./" to indicate relative path? // ./1488240527_408296.obj.mtl\n");
}
public void close()
{
......@@ -121,10 +121,10 @@ map_Kd 1488240527_408296-img2-texture.png
// mtl_writer.write("illum 0\n"); // should it be 0 orf 2?
mtl_writer.write("Ns 0.000000\n");
mtl_writer.write("map_Kd "+texture_path+"\n");
// Write OBJ file
// start using new material
obj_writer.write("usemtl "+material_id+"\n");
// output all vertices
obj_writer.write("# start from vertex : "+ v_index+ "\n");
......@@ -152,17 +152,17 @@ map_Kd 1488240527_408296-img2-texture.png
obj_writer.write("# vertices: "+ coordinate.length+"\n");
obj_writer.write("# texture vertices: "+ texCoord.length+"\n");
obj_writer.write("# faces (triangles): "+ triangles.length+"\n");
obj_writer.write("\n");
// increment indices
v_index += coordinate.length;
vt_index += texCoord.length;
f_index += triangles.length;
//f 27151/27139/27151 27141/27140/27141 27140/27141/27140 *
//f 27151/27139/27151 27141/27140/27141 27140/27141/27140 *
}
}
......@@ -46,7 +46,6 @@ import org.w3c.dom.Element;
import com.elphel.imagej.cameras.CLTParameters;
import com.elphel.imagej.cameras.EyesisCorrectionParameters;
import com.elphel.imagej.cameras.EyesisCorrectionParameters.CorrectionParameters;
import com.elphel.imagej.tileprocessor.CLTPass3d;
import com.elphel.imagej.tileprocessor.GeometryCorrection;
......@@ -54,8 +53,8 @@ import com.elphel.imagej.tileprocessor.GeometryCorrection;
public class X3dOutput {
GeometryCorrection geometry_correction;
public ArrayList <CLTPass3d> clt_3d_passes;
public CLTParameters clt_parameters;
public ArrayList <CLTPass3d> clt_3d_passes;
public CLTParameters clt_parameters;
public EyesisCorrectionParameters.CorrectionParameters correctionsParameters;
public int debugLevel = 1;
Document x3dDoc;
......@@ -67,10 +66,10 @@ public class X3dOutput {
public X3dOutput() {}
public X3dOutput(
CLTParameters clt_parameters,
CLTParameters clt_parameters,
EyesisCorrectionParameters.CorrectionParameters correctionsParameters,
GeometryCorrection geometry_correction,
ArrayList <CLTPass3d> clt_3d_passes){
ArrayList <CLTPass3d> clt_3d_passes){
this.clt_parameters = clt_parameters;
this.correctionsParameters = correctionsParameters;
this.geometry_correction = geometry_correction;
......