Commit f606663c authored by Andrey Filippov

more development

parent 67a6a946
......@@ -1882,7 +1882,8 @@ public class EyesisCorrectionParameters {
public double fat_zero = 0.0; // modify phase correlation to prevent division by very small numbers
public double corr_sigma = 0.8; // LPF correlation sigma
public boolean norm_kern = true; // normalize kernels
public boolean gains_equalize = true; // equalize channel color gains among all cameras
public boolean gain_equalize = false;// equalize green channel gain
public boolean colors_equalize = true; // equalize R/G, B/G of the individual channels
public double novignetting_r = 0.2644; // red gain in the center of sensor calibration R (instead of vignetting)
public double novignetting_g = 0.3733; // green gain in the center of sensor calibration G
public double novignetting_b = 0.2034; // blue gain in the center of sensor calibration B
......@@ -1892,7 +1893,35 @@ public class EyesisCorrectionParameters {
public double vignetting_max = 0.4; // value in vignetting data to correspond to 1x in the kernel
public double vignetting_range = 5.0; // do not try to correct vignetting less than vignetting_max/vignetting_range
public int kernel_step = 16; // source kernels step in pixels (have 1 kernel margin on each side)
public double disparity = 0.0; // nominal disparity between side of square cameras (pix)
public double disparity = 0.0; // nominal disparity between side of square cameras (pix)
public boolean correlate = true; // calculate correlation
public int corr_mask = 15; // bitmask of pairs to combine in the composite
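// (presumably bits 0..3 correspond to the top, bottom, left and right pairs listed in the dialog below; 15 enables all four)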
public boolean corr_sym = false; // combine correlation with mirrored around disparity direction
public boolean corr_keep = true; // keep all partial correlations (otherwise - only combined one)
// TODO: what to do if some occlusion is present (only some channels correlate)
public double corr_offset = -1.0; //0.1; // add to pair correlation before multiplying by other pairs (between sum and product)
// negative - add, not mpy
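// i.e. a non-negative corr_offset is added to each pair correlation before the pairs are multiplied together,
// while a negative value makes the pair correlations be summed instead of multiplied (per the comments above)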
public double corr_red = 0.5; // Red to green correlation weight
public double corr_blue = 0.2; // Blue to green correlation weight
public boolean corr_normalize = false; // normalize each correlation tile by rms
public double min_corr = 0.001; // minimal correlation value to consider valid
public double min_corr_normalized = 2.0; // minimal correlation value to consider valid when normalizing correlation results
public double max_corr_sigma = 1.5; // sigma for weights of points around global max to find fractional
// pixel location by quadratic approximation
public double max_corr_radius = 2.5; // maximal distance from integer max to consider when finding fractional
// pixel location by quadratic approximation
public double corr_border_contrast = 0.01; // contrast of dotted border on correlation results
public int tile_task_op = 0xff; // bitmask of operation modes applied to tiles (0 - nothing), bits TBD later
// +(0..f) - images, +(00.f0) - process pairs +256 - force disparity when combining images
// window to process tiles;
public int tile_task_wl = 0; //
public int tile_task_wt = 0; //
public int tile_task_ww = 324; //
public int tile_task_wh = 242; //
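// tile_task_wl/wt/ww/wh presumably define the processing window in tiles (left, top, width, height);
// the 324 x 242 defaults would cover a full 2592 x 1936 frame at 8-pixel tiles (assumption, not verified here)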
public CLTParameters(){}
public void setProperties(String prefix,Properties properties){
......@@ -1909,7 +1938,8 @@ public class EyesisCorrectionParameters {
properties.setProperty(prefix+"fat_zero", this.fat_zero+"");
properties.setProperty(prefix+"corr_sigma", this.corr_sigma+"");
properties.setProperty(prefix+"norm_kern", this.norm_kern+"");
properties.setProperty(prefix+"gains_equalize", this.gains_equalize+"");
properties.setProperty(prefix+"gain_equalize", this.gain_equalize+"");
properties.setProperty(prefix+"colors_equalize", this.colors_equalize+"");
properties.setProperty(prefix+"novignetting_r", this.novignetting_r+"");
properties.setProperty(prefix+"novignetting_g", this.novignetting_g+"");
properties.setProperty(prefix+"novignetting_b", this.novignetting_b+"");
......@@ -1921,6 +1951,19 @@ public class EyesisCorrectionParameters {
properties.setProperty(prefix+"vignetting_range", this.vignetting_range+"");
properties.setProperty(prefix+"kernel_step", this.kernel_step+"");
properties.setProperty(prefix+"disparity", this.disparity +"");
properties.setProperty(prefix+"correlate", this.correlate+"");
properties.setProperty(prefix+"corr_mask", this.corr_mask+"");
properties.setProperty(prefix+"corr_sym", this.corr_sym+"");
properties.setProperty(prefix+"corr_keep", this.corr_keep+"");
properties.setProperty(prefix+"corr_offset", this.corr_offset +"");
properties.setProperty(prefix+"corr_red", this.corr_red +"");
properties.setProperty(prefix+"corr_blue", this.corr_blue +"");
properties.setProperty(prefix+"corr_normalize", this.corr_normalize+"");
properties.setProperty(prefix+"min_corr", this.min_corr +"");
properties.setProperty(prefix+"min_corr_normalized",this.min_corr_normalized +"");
properties.setProperty(prefix+"max_corr_sigma", this.max_corr_sigma +"");
properties.setProperty(prefix+"max_corr_radius", this.max_corr_radius +"");
properties.setProperty(prefix+"corr_border_contrast", this.corr_border_contrast +"");
}
public void getProperties(String prefix,Properties properties){
......@@ -1937,7 +1980,8 @@ public class EyesisCorrectionParameters {
if (properties.getProperty(prefix+"fat_zero")!=null) this.fat_zero=Double.parseDouble(properties.getProperty(prefix+"fat_zero"));
if (properties.getProperty(prefix+"corr_sigma")!=null) this.corr_sigma=Double.parseDouble(properties.getProperty(prefix+"corr_sigma"));
if (properties.getProperty(prefix+"norm_kern")!=null) this.norm_kern=Boolean.parseBoolean(properties.getProperty(prefix+"norm_kern"));
if (properties.getProperty(prefix+"gains_equalize")!=null) this.gains_equalize=Boolean.parseBoolean(properties.getProperty(prefix+"gains_equalize"));
if (properties.getProperty(prefix+"gain_equalize")!=null) this.gain_equalize=Boolean.parseBoolean(properties.getProperty(prefix+"gain_equalize"));
if (properties.getProperty(prefix+"colors_equalize")!=null)this.colors_equalize=Boolean.parseBoolean(properties.getProperty(prefix+"colors_equalize"));
if (properties.getProperty(prefix+"novignetting_r")!=null) this.novignetting_r=Double.parseDouble(properties.getProperty(prefix+"novignetting_r"));
if (properties.getProperty(prefix+"novignetting_g")!=null) this.novignetting_g=Double.parseDouble(properties.getProperty(prefix+"novignetting_g"));
if (properties.getProperty(prefix+"novignetting_b")!=null) this.novignetting_b=Double.parseDouble(properties.getProperty(prefix+"novignetting_b"));
......@@ -1948,6 +1992,19 @@ public class EyesisCorrectionParameters {
if (properties.getProperty(prefix+"vignetting_range")!=null) this.vignetting_range=Double.parseDouble(properties.getProperty(prefix+"vignetting_range"));
if (properties.getProperty(prefix+"kernel_step")!=null) this.kernel_step=Integer.parseInt(properties.getProperty(prefix+"kernel_step"));
if (properties.getProperty(prefix+"disparity")!=null) this.disparity=Double.parseDouble(properties.getProperty(prefix+"disparity"));
if (properties.getProperty(prefix+"correlate")!=null) this.correlate=Boolean.parseBoolean(properties.getProperty(prefix+"correlate"));
if (properties.getProperty(prefix+"corr_mask")!=null) this.corr_mask=Integer.parseInt(properties.getProperty(prefix+"corr_mask"));
if (properties.getProperty(prefix+"corr_sym")!=null) this.corr_sym=Boolean.parseBoolean(properties.getProperty(prefix+"corr_sym"));
if (properties.getProperty(prefix+"corr_keep")!=null) this.corr_keep=Boolean.parseBoolean(properties.getProperty(prefix+"corr_keep"));
if (properties.getProperty(prefix+"corr_offset")!=null) this.corr_offset=Double.parseDouble(properties.getProperty(prefix+"corr_offset"));
if (properties.getProperty(prefix+"corr_red")!=null) this.corr_red=Double.parseDouble(properties.getProperty(prefix+"corr_red"));
if (properties.getProperty(prefix+"corr_blue")!=null) this.corr_blue=Double.parseDouble(properties.getProperty(prefix+"corr_blue"));
if (properties.getProperty(prefix+"corr_normalize")!=null) this.corr_normalize=Boolean.parseBoolean(properties.getProperty(prefix+"corr_normalize"));
if (properties.getProperty(prefix+"min_corr")!=null) this.min_corr=Double.parseDouble(properties.getProperty(prefix+"min_corr"));
if (properties.getProperty(prefix+"min_corr_normalized")!=null)this.min_corr_normalized=Double.parseDouble(properties.getProperty(prefix+"min_corr_normalized"));
if (properties.getProperty(prefix+"max_corr_sigma")!=null) this.max_corr_sigma=Double.parseDouble(properties.getProperty(prefix+"max_corr_sigma"));
if (properties.getProperty(prefix+"max_corr_radius")!=null) this.max_corr_radius=Double.parseDouble(properties.getProperty(prefix+"max_corr_radius"));
if (properties.getProperty(prefix+"corr_border_contrast")!=null) this.corr_border_contrast=Double.parseDouble(properties.getProperty(prefix+"corr_border_contrast"));
}
public boolean showDialog() {
......@@ -1965,7 +2022,8 @@ public class EyesisCorrectionParameters {
gd.addNumericField("Modify phase correlation to prevent division by very small numbers", this.fat_zero, 4);
gd.addNumericField("LPF correlarion sigma ", this.corr_sigma, 3);
gd.addCheckbox ("Normalize kernels ", this.norm_kern);
gd.addCheckbox ("Equalize gains between channels", this.gains_equalize);
gd.addCheckbox ("Equalize green channel gain of the individual cnannels", this.gain_equalize);
gd.addCheckbox ("Equalize R/G, B/G balance of the individual channels", this.colors_equalize);
gd.addNumericField("Reg gain in the center of sensor calibration R (instead of vignetting)", this.novignetting_r, 4);
gd.addNumericField("Green gain in the center of sensor calibration G (instead of vignetting)",this.novignetting_g, 4);
gd.addNumericField("Blue gain in the center of sensor calibration B (instead of vignetting)", this.novignetting_b, 4);
......@@ -1976,7 +2034,20 @@ public class EyesisCorrectionParameters {
gd.addNumericField("Do not try to correct vignetting smaller than this fraction of max", this.vignetting_range, 3);
gd.addNumericField("Kernel step in pixels (has 1 kernel margin on each side)", this.kernel_step, 0);
gd.addNumericField("Nominal (rectilinear) disparity between side of square cameras (pix)", this.disparity, 3);
gd.addCheckbox ("Perfcorm coorrelation", this.correlate);
gd.addNumericField("itmask of pairs to combine in the composite (top, bottom, left,righth)", this.corr_mask, 0);
gd.addCheckbox ("Combine correlation with mirrored around disparity direction", this.corr_sym);
gd.addCheckbox ("Keep all partial correlations (otherwise - only combined one)", this.corr_keep);
gd.addNumericField("Add to pair correlation before multiplying by other pairs (between sum and product)", this.corr_offset, 6);
gd.addNumericField("Red to green correlation weight", this.corr_red, 4);
gd.addNumericField("Blue to green correlation weight", this.corr_blue, 4);
gd.addCheckbox ("Normalize each correlation tile by rms", this.corr_normalize);
gd.addNumericField("Minimal correlation value to consider valid", this.min_corr, 6);
gd.addNumericField("Minimal correlation value to consider valid when normalizing results", this.min_corr_normalized, 6);
gd.addNumericField("Sigma for weights of points around global max to find fractional", this.max_corr_sigma, 3);
gd.addNumericField("Maximal distance from int max to consider", this.max_corr_radius, 3);
gd.addNumericField("Contrast of dotted border on correlation results", this.corr_border_contrast, 6);
WindowTools.addScrollBars(gd);
gd.showDialog();
......@@ -1994,7 +2065,8 @@ public class EyesisCorrectionParameters {
this.fat_zero = gd.getNextNumber();
this.corr_sigma = gd.getNextNumber();
this.norm_kern= gd.getNextBoolean();
this.gains_equalize= gd.getNextBoolean();
this.gain_equalize= gd.getNextBoolean();
this.colors_equalize= gd.getNextBoolean();
this.novignetting_r= gd.getNextNumber();
this.novignetting_g= gd.getNextNumber();
this.novignetting_b= gd.getNextNumber();
......@@ -2005,6 +2077,19 @@ public class EyesisCorrectionParameters {
this.vignetting_range= gd.getNextNumber();
this.kernel_step= (int) gd.getNextNumber();
this.disparity= gd.getNextNumber();
this.correlate= gd.getNextBoolean();
this.corr_mask= (int) gd.getNextNumber();
this.corr_sym= gd.getNextBoolean();
this.corr_keep= gd.getNextBoolean();
this.corr_offset= gd.getNextNumber();
this.corr_red= gd.getNextNumber();
this.corr_blue= gd.getNextNumber();
this.corr_normalize= gd.getNextBoolean();
this.min_corr= gd.getNextNumber();
this.min_corr_normalized= gd.getNextNumber();
this.max_corr_sigma= gd.getNextNumber();
this.max_corr_radius= gd.getNextNumber();
this.corr_border_contrast= gd.getNextNumber();
return true;
}
......
......@@ -2802,7 +2802,7 @@ public class EyesisDCT {
}
}
}
if (clt_parameters.gains_equalize){
if (clt_parameters.gain_equalize){
}
......@@ -3424,64 +3424,16 @@ public class EyesisDCT {
}
}
// may need to equalize gains between channels
if (clt_parameters.gains_equalize){
double [][] avr_pix = new double [channelFiles.length][3];
double [] avr_RGB = {0.0,0.0,0.0};
int numChn = 0;
for (int srcChannel=0; srcChannel < channelFiles.length; srcChannel++){
int nFile=channelFiles[srcChannel];
if (nFile >=0){
for (int i = 0; i < avr_pix[srcChannel].length; i++) avr_pix[srcChannel][i] = 0;
float [] pixels=(float []) imp_srcs[srcChannel].getProcessor().getPixels();
int width = imp_srcs[srcChannel].getWidth();
int height = imp_srcs[srcChannel].getHeight();
for (int y = 0; y < height-1; y+=2){
for (int x = 0; x < width-1; x+=2){
avr_pix[srcChannel][0] += pixels[y*width+x +1];
avr_pix[srcChannel][2] += pixels[y*width+x+width ];
avr_pix[srcChannel][1] += pixels[y*width+x ];
avr_pix[srcChannel][1] += pixels[y*width+x+width+1];
}
}
avr_pix[srcChannel][0] /= 0.25*width*height;
avr_pix[srcChannel][1] /= 0.5 *width*height;
avr_pix[srcChannel][2] /= 0.25*width*height;
for (int j=0; j < avr_RGB.length; j++) avr_RGB[j] += avr_pix[srcChannel][j];
numChn++;
if (debugLevel>-1) {
System.out.println("processCLTSets(): set "+ setNames.get(nSet) + " channel "+srcChannel+
" R"+avr_pix[srcChannel][0]+" G"+avr_pix[srcChannel][1]+" B"+avr_pix[srcChannel][2]);
}
}
}
for (int j=0; j < avr_RGB.length; j++) avr_RGB[j] /= numChn;
if (debugLevel>-1) {
System.out.println("processCLTSets(): set "+ setNames.get(nSet) + "average color values: "+
" R="+avr_RGB[0]+" G=" + avr_RGB[1]+" B=" + avr_RGB[2]);
}
for (int srcChannel=0; srcChannel < channelFiles.length; srcChannel++){
int nFile=channelFiles[srcChannel];
if (nFile >=0){
double [] scales = new double [avr_RGB.length];
for (int j=0;j < scales.length; j++){
scales[j] = avr_RGB[j]/avr_pix[srcChannel][j];
}
float [] pixels=(float []) imp_srcs[srcChannel].getProcessor().getPixels();
int width = imp_srcs[srcChannel].getWidth();
int height = imp_srcs[srcChannel].getHeight();
for (int y = 0; y < height-1; y+=2){
for (int x = 0; x < width-1; x+=2){
pixels[y*width+x ] *= scales[1];
pixels[y*width+x+width+1] *= scales[1];
pixels[y*width+x +1] *= scales[0];
pixels[y*width+x+width ] *= scales[2];
}
}
}
}
// may need to equalize gains between channels
if (clt_parameters.gain_equalize || clt_parameters.colors_equalize){
channelGainsEqualize(
clt_parameters.gain_equalize,
clt_parameters.colors_equalize,
channelFiles,
imp_srcs,
setNames.get(nSet), // just for debug messages == setNames.get(nSet)
debugLevel);
}
for (int srcChannel=0; srcChannel<channelFiles.length; srcChannel++){
int nFile=channelFiles[srcChannel];
if (nFile >=0){
......@@ -3970,10 +3922,6 @@ public class EyesisDCT {
return result;
}
public void processCLTQuads(
......@@ -4048,7 +3996,6 @@ public class EyesisDCT {
}
setFiles.get(setNames.indexOf(setName)).add(new Integer(nFile));
}
int iImage = 0;
for (int nSet = 0; nSet < setNames.size(); nSet++){
int maxChn = 0;
for (int i = 0; i < setFiles.get(nSet).size(); i++){
......@@ -4062,7 +4009,7 @@ public class EyesisDCT {
}
ImagePlus [] imp_srcs = new ImagePlus[channelFiles.length];
double [] scaleExposure = new double[channelFiles.length];
double [] scaleExposures = new double[channelFiles.length];
for (int srcChannel=0; srcChannel<channelFiles.length; srcChannel++){
int nFile=channelFiles[srcChannel];
imp_srcs[srcChannel]=null;
......@@ -4092,10 +4039,10 @@ public class EyesisDCT {
eyesisCorrections.JP4_INSTANCE.decodeProperiesFromInfo(imp_srcs[srcChannel]); // decode existent properties from info
if (debugLevel>0) System.out.println("Processing "+sourceFiles[nFile]);
}
scaleExposure[srcChannel] = 1.0;
scaleExposures[srcChannel] = 1.0;
if (!Double.isNaN(referenceExposures[nFile]) && (imp_srcs[srcChannel].getProperty("EXPOSURE")!=null)){
scaleExposure[srcChannel] = referenceExposures[nFile]/Double.parseDouble((String) imp_srcs[srcChannel].getProperty("EXPOSURE"));
if (debugLevel>0) System.out.println("Will scale intensity (to compensate for exposure) by "+scaleExposure);
scaleExposures[srcChannel] = referenceExposures[nFile]/Double.parseDouble((String) imp_srcs[srcChannel].getProperty("EXPOSURE"));
if (debugLevel>0) System.out.println("Will scale intensity (to compensate for exposure) by "+scaleExposures[srcChannel]);
}
imp_srcs[srcChannel].setProperty("name", correctionsParameters.getNameFromSourceTiff(sourceFiles[nFile]));
imp_srcs[srcChannel].setProperty("channel", srcChannel); // it may already have channel
......@@ -4170,96 +4117,38 @@ public class EyesisDCT {
}
}
}
// once per quad here
// may need to equalize gains between channels
if (clt_parameters.gains_equalize){
double [][] avr_pix = new double [channelFiles.length][3];
double [] avr_RGB = {0.0,0.0,0.0};
int numChn = 0;
for (int srcChannel=0; srcChannel < channelFiles.length; srcChannel++){
int nFile=channelFiles[srcChannel];
if (nFile >=0){
for (int i = 0; i < avr_pix[srcChannel].length; i++) avr_pix[srcChannel][i] = 0;
float [] pixels=(float []) imp_srcs[srcChannel].getProcessor().getPixels();
int width = imp_srcs[srcChannel].getWidth();
int height = imp_srcs[srcChannel].getHeight();
for (int y = 0; y < height-1; y+=2){
for (int x = 0; x < width-1; x+=2){
avr_pix[srcChannel][0] += pixels[y*width+x +1];
avr_pix[srcChannel][2] += pixels[y*width+x+width ];
avr_pix[srcChannel][1] += pixels[y*width+x ];
avr_pix[srcChannel][1] += pixels[y*width+x+width+1];
}
}
avr_pix[srcChannel][0] /= 0.25*width*height;
avr_pix[srcChannel][1] /= 0.5 *width*height;
avr_pix[srcChannel][2] /= 0.25*width*height;
for (int j=0; j < avr_RGB.length; j++) avr_RGB[j] += avr_pix[srcChannel][j];
numChn++;
if (debugLevel>-1) {
System.out.println("processCLTSets(): set "+ setNames.get(nSet) + " channel "+srcChannel+
" R"+avr_pix[srcChannel][0]+" G"+avr_pix[srcChannel][1]+" B"+avr_pix[srcChannel][2]);
}
}
}
for (int j=0; j < avr_RGB.length; j++) avr_RGB[j] /= numChn;
if (debugLevel>-1) {
System.out.println("processCLTSets(): set "+ setNames.get(nSet) + "average color values: "+
" R="+avr_RGB[0]+" G=" + avr_RGB[1]+" B=" + avr_RGB[2]);
}
for (int srcChannel=0; srcChannel < channelFiles.length; srcChannel++){
int nFile=channelFiles[srcChannel];
if (nFile >=0){
double [] scales = new double [avr_RGB.length];
for (int j=0;j < scales.length; j++){
scales[j] = avr_RGB[j]/avr_pix[srcChannel][j];
}
float [] pixels=(float []) imp_srcs[srcChannel].getProcessor().getPixels();
int width = imp_srcs[srcChannel].getWidth();
int height = imp_srcs[srcChannel].getHeight();
for (int y = 0; y < height-1; y+=2){
for (int x = 0; x < width-1; x+=2){
pixels[y*width+x ] *= scales[1];
pixels[y*width+x+width+1] *= scales[1];
pixels[y*width+x +1] *= scales[0];
pixels[y*width+x+width ] *= scales[2];
}
}
}
}
// may need to equalize gains between channels
if (clt_parameters.gain_equalize || clt_parameters.colors_equalize){
channelGainsEqualize(
clt_parameters.gain_equalize,
clt_parameters.colors_equalize,
channelFiles,
imp_srcs,
setNames.get(nSet), // just for debug messages == setNames.get(nSet)
debugLevel);
}
for (int srcChannel=0; srcChannel<channelFiles.length; srcChannel++){
int nFile=channelFiles[srcChannel];
if (nFile >=0){
processCLTQuad( // returns ImagePlus, but it already should be saved/shown
imp_srcs, // [srcChannel], // should have properties "name"(base for saving results), "channel","path"
clt_parameters,
debayerParameters,
nonlinParameters,
colorProcParameters,
channelGainParameters,
rgbParameters,
convolveFFTSize, // 128 - fft size, kernel size should be size/2
scaleExposure[srcChannel],
threadsMax, // maximal number of threads to launch
updateStatus,
debugLevel);
// warp result (add support for different color modes)
if (this.correctionsParameters.equirectangular){
if (equirectangularParameters.clearFullMap) eyesisCorrections.pixelMapping.deleteEquirectangularMapFull(srcChannel); // save memory? //removeUnusedSensorData - no, use equirectangular specific settings
if (equirectangularParameters.clearAllMaps) eyesisCorrections.pixelMapping.deleteEquirectangularMapAll(srcChannel); // save memory? //removeUnusedSensorData - no, use equirectangular specific settings
}
//pixelMapping
Runtime.getRuntime().gc();
if (debugLevel >-1) System.out.println("Processing image "+(iImage+1)+" (of "+fileIndices.length+") finished at "+
IJ.d2s(0.000000001*(System.nanoTime()-this.startTime),3)+" sec, --- Free memory="+Runtime.getRuntime().freeMemory()+" (of "+Runtime.getRuntime().totalMemory()+")");
if (eyesisCorrections.stopRequested.get()>0) {
System.out.println("User requested stop");
return;
}
iImage++;
}
// once per quad here
processCLTQuad( // returns ImagePlus, but it already should be saved/shown
imp_srcs, // [srcChannel], // should have properties "name"(base for saving results), "channel","path"
clt_parameters,
debayerParameters,
nonlinParameters,
colorProcParameters,
channelGainParameters,
rgbParameters,
convolveFFTSize, // 128 - fft size, kernel size should be size/2
scaleExposures,
threadsMax, // maximal number of threads to launch
updateStatus,
debugLevel);
Runtime.getRuntime().gc();
if (debugLevel >-1) System.out.println("Processing set "+(nSet+1)+" (of "+fileIndices.length+") finished at "+
IJ.d2s(0.000000001*(System.nanoTime()-this.startTime),3)+" sec, --- Free memory="+Runtime.getRuntime().freeMemory()+" (of "+Runtime.getRuntime().totalMemory()+")");
if (eyesisCorrections.stopRequested.get()>0) {
System.out.println("User requested stop");
return;
}
}
System.out.println("Processing "+fileIndices.length+" files finished at "+
......@@ -4275,7 +4164,7 @@ public class EyesisDCT {
CorrectionColorProc.ColorGainsParameters channelGainParameters,
EyesisCorrectionParameters.RGBParameters rgbParameters,
int convolveFFTSize, // 128 - fft size, kernel size should be size/2
double scaleExposure,
double [] scaleExposures, // probably not needed here
final int threadsMax, // maximal number of threads to launch
final boolean updateStatus,
final int debugLevel){
......@@ -4292,24 +4181,20 @@ public class EyesisDCT {
int channel= (Integer) imp_quad[0].getProperty("channel");
String path= (String) imp_quad[0].getProperty("path");
// String title=name+"-"+String.format("%02d", channel);
// ImagePlus result=imp_src;
ImagePlus [] results = new ImagePlus[imp_quad.length];
for (int i = 0; i < results.length; i++) {
results[i] = imp_quad[i];
results[i].setTitle(results[i].getTitle()+"RAW");
}
if (debugLevel>1) System.out.println("processing: "+path);
// result.setTitle(title+"RAW");
double [][][] double_stacks = new double [imp_quad.length][][];
for (int i = 0; i < double_stacks.length; i++){
double_stacks[i] = eyesisCorrections.bayerToDoubleStack(
imp_quad[i], // source Bayer image, linearized, 32-bit (float))
null); // no margins, no oversample
}
String [] rbg_titles = {"Red", "Blue", "Green"};
// String [] rbg_titles = {"Red", "Blue", "Green"};
ImageStack stack;
// =================
ImageDtt image_dtt = new ImageDtt();
......@@ -4336,8 +4221,6 @@ public class EyesisDCT {
(clt_parameters.dbg_mode & 256) != 0, // transpose convolve
threadsMax,
debugLevel);
// updateStatus);
System.out.println("clt_data.length="+clt_data.length+" clt_data[0].length="+clt_data[0].length
+" clt_data[0][0].length="+clt_data[0][0].length+" clt_data[0][0][0].length="+
......@@ -4401,10 +4284,6 @@ public class EyesisDCT {
(tilesY + 0) * clt_parameters.transform_size,
true,
results[iQuad].getTitle()+"-rbg_sigma");
/*
}
}
*/
if (debugLevel > 0) sdfa_instance.showArrays(iclt_data,
(tilesX + 0) * clt_parameters.transform_size,
(tilesY + 0) * clt_parameters.transform_size,
......@@ -4419,8 +4298,6 @@ public class EyesisDCT {
(tilesY + 0) * clt_parameters.transform_size,
sliceNames); // or use null to get chn-nn slice names
if (debugLevel > -1){
double [] chn_avg = {0.0,0.0,0.0};
float [] pixels;
......@@ -4461,7 +4338,7 @@ public class EyesisDCT {
imp_dbg,
this.correctionsParameters);
}
if (debugLevel > 1) System.out.println("before colors.3, scaleExposure="+scaleExposure+" scale = "+(255.0/eyesisCorrections.psfSubpixelShouldBe4/eyesisCorrections.psfSubpixelShouldBe4/scaleExposure));
if (debugLevel > 1) System.out.println("before colors.3, scaleExposure="+scaleExposures[iQuad]+" scale = "+(255.0/eyesisCorrections.psfSubpixelShouldBe4/eyesisCorrections.psfSubpixelShouldBe4/scaleExposures[iQuad]));
CorrectionColorProc correctionColorProc=new CorrectionColorProc(eyesisCorrections.stopRequested);
double [][] yPrPb=new double [3][];
// if (dct_parameters.color_DCT){
......@@ -4471,7 +4348,7 @@ public class EyesisDCT {
// 255.0/this.psfSubpixelShouldBe4/this.psfSubpixelShouldBe4, // double scale, // initial maximal pixel value (16))
// 255.0/eyesisCorrections.psfSubpixelShouldBe4/eyesisCorrections.psfSubpixelShouldBe4/scaleExposure, // double scale, // initial maximal pixel value (16))
// 255.0/2/2/scaleExposure, // double scale, // initial maximal pixel value (16))
255.0/scaleExposure, // double scale, // initial maximal pixel value (16))
255.0/scaleExposures[iQuad], // double scale, // initial maximal pixel value (16))
colorProcParameters,
channelGainParameters,
channel,
......@@ -4581,7 +4458,699 @@ public class EyesisDCT {
// work version, tested - above
public void processCLTQuadCorrs(
EyesisCorrectionParameters.CLTParameters clt_parameters,
EyesisCorrectionParameters.DebayerParameters debayerParameters,
EyesisCorrectionParameters.NonlinParameters nonlinParameters,
EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
CorrectionColorProc.ColorGainsParameters channelGainParameters,
EyesisCorrectionParameters.RGBParameters rgbParameters,
EyesisCorrectionParameters.EquirectangularParameters equirectangularParameters,
int convolveFFTSize, // 128 - fft size, kernel size should be size/2
final int threadsMax, // maximal number of threads to launch
final boolean updateStatus,
final int debugLevel)
{
this.startTime=System.nanoTime();
String [] sourceFiles=correctionsParameters.getSourcePaths();
boolean [] enabledFiles=new boolean[sourceFiles.length];
for (int i=0;i<enabledFiles.length;i++) enabledFiles[i]=false;
int numFilesToProcess=0;
int numImagesToProcess=0;
for (int nFile=0;nFile<enabledFiles.length;nFile++){
if ((sourceFiles[nFile]!=null) && (sourceFiles[nFile].length()>1)) {
int [] channels={correctionsParameters.getChannelFromSourceTiff(sourceFiles[nFile])};
if (correctionsParameters.isJP4()){
int subCamera= channels[0]- correctionsParameters.firstSubCamera; // to match those in the sensor files
// removeUnusedSensorData should be off!?
channels=this.eyesisCorrections.pixelMapping.channelsForSubCamera(subCamera);
}
if (channels!=null){
for (int i=0;i<channels.length;i++) if (eyesisCorrections.isChannelEnabled(channels[i])){
if (!enabledFiles[nFile]) numFilesToProcess++;
enabledFiles[nFile]=true;
numImagesToProcess++;
}
}
}
}
if (numFilesToProcess==0){
System.out.println("No files to process (of "+sourceFiles.length+")");
return;
} else {
if (debugLevel>0) System.out.println(numFilesToProcess+ " files to process (of "+sourceFiles.length+"), "+numImagesToProcess+" images to process");
}
double [] referenceExposures=eyesisCorrections.calcReferenceExposures(debugLevel); // multiply each image by this and divide by individual (if not NaN)
int [][] fileIndices=new int [numImagesToProcess][2]; // file index, channel number
int index=0;
for (int nFile=0;nFile<enabledFiles.length;nFile++){
if ((sourceFiles[nFile]!=null) && (sourceFiles[nFile].length()>1)) {
int [] channels={correctionsParameters.getChannelFromSourceTiff(sourceFiles[nFile])};
if (correctionsParameters.isJP4()){
int subCamera= channels[0]- correctionsParameters.firstSubCamera; // to match those in the sensor files
channels=eyesisCorrections.pixelMapping.channelsForSubCamera(subCamera);
}
if (channels!=null){
for (int i=0;i<channels.length;i++) if (eyesisCorrections.isChannelEnabled(channels[i])){
fileIndices[index ][0]=nFile;
fileIndices[index++][1]=channels[i];
}
}
}
}
ArrayList<String> setNames = new ArrayList<String>();
ArrayList<ArrayList<Integer>> setFiles = new ArrayList<ArrayList<Integer>>();
for (int iImage=0;iImage<fileIndices.length;iImage++){
int nFile=fileIndices[iImage][0];
String setName = correctionsParameters.getNameFromSourceTiff(sourceFiles[nFile]);
if (!setNames.contains(setName)) {
setNames.add(setName);
setFiles.add(new ArrayList<Integer>());
}
setFiles.get(setNames.indexOf(setName)).add(new Integer(nFile));
}
for (int nSet = 0; nSet < setNames.size(); nSet++){
int maxChn = 0;
for (int i = 0; i < setFiles.get(nSet).size(); i++){
int chn = fileIndices[setFiles.get(nSet).get(i)][1];
if (chn > maxChn) maxChn = chn;
}
int [] channelFiles = new int[maxChn+1];
for (int i =0; i < channelFiles.length; i++) channelFiles[i] = -1;
for (int i = 0; i < setFiles.get(nSet).size(); i++){
channelFiles[fileIndices[setFiles.get(nSet).get(i)][1]] = setFiles.get(nSet).get(i);
}
ImagePlus [] imp_srcs = new ImagePlus[channelFiles.length];
double [] scaleExposures = new double[channelFiles.length];
for (int srcChannel=0; srcChannel<channelFiles.length; srcChannel++){
int nFile=channelFiles[srcChannel];
imp_srcs[srcChannel]=null;
if (nFile >=0){
if (correctionsParameters.isJP4()){
int subchannel=eyesisCorrections.pixelMapping.getSubChannel(srcChannel);
if (this.correctionsParameters.swapSubchannels01) {
switch (subchannel){
case 0: subchannel=1; break;
case 1: subchannel=0; break;
}
}
if (debugLevel>0) System.out.println("Processing set " + setNames.get(nSet)+" channel "+srcChannel+" - subchannel "+subchannel+" of "+sourceFiles[nFile]);
ImagePlus imp_composite=eyesisCorrections.JP4_INSTANCE.open(
"", // path,
sourceFiles[nFile],
"", //arg - not used in JP46 reader
true, // un-apply camera color gains
null, // new window
false); // do not show
imp_srcs[srcChannel]=eyesisCorrections.JP4_INSTANCE.demuxImage(imp_composite, subchannel);
if (imp_srcs[srcChannel] == null) imp_srcs[srcChannel] = imp_composite; // not a composite image
// do we need to add any properties?
} else {
imp_srcs[srcChannel]=new ImagePlus(sourceFiles[nFile]);
// (new JP46_Reader_camera(false)).decodeProperiesFromInfo(imp_src); // decode existent properties from info
eyesisCorrections.JP4_INSTANCE.decodeProperiesFromInfo(imp_srcs[srcChannel]); // decode existent properties from info
if (debugLevel>0) System.out.println("Processing "+sourceFiles[nFile]);
}
scaleExposures[srcChannel] = 1.0;
if (!Double.isNaN(referenceExposures[nFile]) && (imp_srcs[srcChannel].getProperty("EXPOSURE")!=null)){
scaleExposures[srcChannel] = referenceExposures[nFile]/Double.parseDouble((String) imp_srcs[srcChannel].getProperty("EXPOSURE"));
if (debugLevel>0) System.out.println("Will scale intensity (to compensate for exposure) by "+scaleExposures[srcChannel]);
}
imp_srcs[srcChannel].setProperty("name", correctionsParameters.getNameFromSourceTiff(sourceFiles[nFile]));
imp_srcs[srcChannel].setProperty("channel", srcChannel); // it may already have channel
imp_srcs[srcChannel].setProperty("path", sourceFiles[nFile]); // it may already have channel
if (this.correctionsParameters.pixelDefects && (eyesisCorrections.defectsXY!=null)&& (eyesisCorrections.defectsXY[srcChannel]!=null)){
// apply pixel correction
int numApplied= eyesisCorrections.correctDefects(
imp_srcs[srcChannel],
srcChannel,
debugLevel);
if ((debugLevel>0) && (numApplied>0)) { // reduce verbosity after verified defect correction works
System.out.println("Corrected "+numApplied+" pixels in "+sourceFiles[nFile]);
}
}
if (this.correctionsParameters.vignetting){
if ((eyesisCorrections.channelVignettingCorrection==null) || (srcChannel<0) || (srcChannel>=eyesisCorrections.channelVignettingCorrection.length) || (eyesisCorrections.channelVignettingCorrection[srcChannel]==null)){
System.out.println("No vignetting data for channel "+srcChannel);
return;
}
float [] pixels=(float []) imp_srcs[srcChannel].getProcessor().getPixels();
if (pixels.length!=eyesisCorrections.channelVignettingCorrection[srcChannel].length){
System.out.println("Vignetting data for channel "+srcChannel+" has "+eyesisCorrections.channelVignettingCorrection[srcChannel].length+" pixels, image "+sourceFiles[nFile]+" has "+pixels.length);
return;
}
// TODO: Move to do it once:
double min_non_zero = 0.0;
for (int i=0;i<pixels.length;i++){
double d = eyesisCorrections.channelVignettingCorrection[srcChannel][i];
if ((d > 0.0) && ((min_non_zero == 0) || (min_non_zero > d))){
min_non_zero = d;
}
}
double max_vign_corr = clt_parameters.vignetting_range*min_non_zero;
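// clamp the per-pixel correction factor to vignetting_range times the smallest non-zero factor,
// so heavily vignetted (dark) areas are not over-amplified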
System.out.println("Vignetting data: channel="+srcChannel+", min = "+min_non_zero);
for (int i=0;i<pixels.length;i++){
double d = eyesisCorrections.channelVignettingCorrection[srcChannel][i];
if (d > max_vign_corr) d = max_vign_corr;
pixels[i]*=d;
}
// Scale here, combine with vignetting later?
int width = imp_srcs[srcChannel].getWidth();
int height = imp_srcs[srcChannel].getHeight();
for (int y = 0; y < height-1; y+=2){
for (int x = 0; x < width-1; x+=2){
pixels[y*width+x ] *= clt_parameters.scale_g;
pixels[y*width+x+width+1] *= clt_parameters.scale_g;
pixels[y*width+x +1] *= clt_parameters.scale_r;
pixels[y*width+x+width ] *= clt_parameters.scale_b;
}
}
} else { // assuming GR/BG pattern
System.out.println("Applying fixed color gain correction parameters: Gr="+
clt_parameters.novignetting_r+", Gg="+clt_parameters.novignetting_g+", Gb="+clt_parameters.novignetting_b);
float [] pixels=(float []) imp_srcs[srcChannel].getProcessor().getPixels();
int width = imp_srcs[srcChannel].getWidth();
int height = imp_srcs[srcChannel].getHeight();
double kr = clt_parameters.scale_r/clt_parameters.novignetting_r;
double kg = clt_parameters.scale_g/clt_parameters.novignetting_g;
double kb = clt_parameters.scale_b/clt_parameters.novignetting_b;
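// kr/kg/kb apply the configured per-color scales relative to the fixed calibration gains used
// instead of vignetting data (novignetting_r/g/b), assuming the same GR/BG Bayer layout as noted above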
for (int y = 0; y < height-1; y+=2){
for (int x = 0; x < width-1; x+=2){
pixels[y*width+x ] *= kg;
pixels[y*width+x+width+1] *= kg;
pixels[y*width+x +1] *= kr;
pixels[y*width+x+width ] *= kb;
}
}
}
}
}
// once per quad here
// may need to equalize gains between channels
if (clt_parameters.gain_equalize || clt_parameters.colors_equalize){
channelGainsEqualize(
clt_parameters.gain_equalize,
clt_parameters.colors_equalize,
channelFiles,
imp_srcs,
setNames.get(nSet), // just for debug messages == setNames.get(nSet)
debugLevel);
}
// once per quad here
processCLTQuadCorr( // returns ImagePlus, but it already should be saved/shown
imp_srcs, // [srcChannel], // should have properties "name"(base for saving results), "channel","path"
clt_parameters,
debayerParameters,
nonlinParameters,
colorProcParameters,
channelGainParameters,
rgbParameters,
convolveFFTSize, // 128 - fft size, kernel size should be size/2
scaleExposures,
threadsMax, // maximal number of threads to launch
updateStatus,
debugLevel);
Runtime.getRuntime().gc();
if (debugLevel >-1) System.out.println("Processing set "+(nSet+1)+" (of "+fileIndices.length+") finished at "+
IJ.d2s(0.000000001*(System.nanoTime()-this.startTime),3)+" sec, --- Free memory="+Runtime.getRuntime().freeMemory()+" (of "+Runtime.getRuntime().totalMemory()+")");
if (eyesisCorrections.stopRequested.get()>0) {
System.out.println("User requested stop");
return;
}
}
System.out.println("Processing "+fileIndices.length+" files finished at "+
IJ.d2s(0.000000001*(System.nanoTime()-this.startTime),3)+" sec, --- Free memory="+Runtime.getRuntime().freeMemory()+" (of "+Runtime.getRuntime().totalMemory()+")");
}
public void channelGainsEqualize(
boolean gain_equalize,
boolean colors_equalize,
int [] channelFiles,
ImagePlus [] imp_srcs,
String setName, // just for debug messages == setNames.get(nSet)
int debugLevel){
double [][] avr_pix = new double [channelFiles.length][3];
double [] avr_RGB = {0.0,0.0,0.0};
int numChn = 0;
for (int srcChannel=0; srcChannel < channelFiles.length; srcChannel++){
int nFile=channelFiles[srcChannel];
if (nFile >=0){
for (int i = 0; i < avr_pix[srcChannel].length; i++) avr_pix[srcChannel][i] = 0;
float [] pixels=(float []) imp_srcs[srcChannel].getProcessor().getPixels();
int width = imp_srcs[srcChannel].getWidth();
int height = imp_srcs[srcChannel].getHeight();
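// accumulate per-color sums over 2x2 Bayer cells (GR/BG pattern, as assumed elsewhere in this class):
// x+1 -> R, x+width -> B, x and x+width+1 -> G; the divisors below reflect green having twice as many samples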
for (int y = 0; y < height-1; y+=2){
for (int x = 0; x < width-1; x+=2){
avr_pix[srcChannel][0] += pixels[y*width+x +1];
avr_pix[srcChannel][2] += pixels[y*width+x+width ];
avr_pix[srcChannel][1] += pixels[y*width+x ];
avr_pix[srcChannel][1] += pixels[y*width+x+width+1];
}
}
avr_pix[srcChannel][0] /= 0.25*width*height;
avr_pix[srcChannel][1] /= 0.5 *width*height;
avr_pix[srcChannel][2] /= 0.25*width*height;
for (int j=0; j < avr_RGB.length; j++) avr_RGB[j] += avr_pix[srcChannel][j];
numChn++;
if (debugLevel>-1) {
System.out.println("processCLTSets(): set "+ setName + " channel "+srcChannel+
" R"+avr_pix[srcChannel][0]+" G"+avr_pix[srcChannel][1]+" B"+avr_pix[srcChannel][2]);
}
}
}
for (int j=0; j < avr_RGB.length; j++) avr_RGB[j] /= numChn;
if (debugLevel>-1) {
System.out.println("processCLTSets(): set "+ setName + "average color values: "+
" R="+avr_RGB[0]+" G=" + avr_RGB[1]+" B=" + avr_RGB[2]);
}
for (int srcChannel=0; srcChannel < channelFiles.length; srcChannel++){
int nFile=channelFiles[srcChannel];
if (nFile >=0){
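// gain_equalize scales the whole channel so its green average matches the per-set average;
// colors_equalize additionally matches the channel's R/G and B/G ratios to the set average,
// so with both enabled scales[j] reduces to avr_RGB[j]/avr_pix[srcChannel][j]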
double [] scales = new double [avr_RGB.length];
for (int j=0;j < scales.length; j++){
scales[j] = 1.0;
if (gain_equalize){
scales[j] *= avr_RGB[1]/avr_pix[srcChannel][1]; // 1 - index of green color
}
if (colors_equalize){
scales[j] *= avr_RGB[j]/avr_pix[srcChannel][j] / (avr_RGB[1]/avr_pix[srcChannel][1]);
}
}
float [] pixels=(float []) imp_srcs[srcChannel].getProcessor().getPixels();
int width = imp_srcs[srcChannel].getWidth();
int height = imp_srcs[srcChannel].getHeight();
for (int y = 0; y < height-1; y+=2){
for (int x = 0; x < width-1; x+=2){
pixels[y*width+x ] *= scales[1];
pixels[y*width+x+width+1] *= scales[1];
pixels[y*width+x +1] *= scales[0];
pixels[y*width+x+width ] *= scales[2];
}
}
}
}
}
public ImagePlus [] processCLTQuadCorr(
ImagePlus [] imp_quad, // should have properties "name"(base for saving results), "channel","path"
EyesisCorrectionParameters.CLTParameters clt_parameters,
EyesisCorrectionParameters.DebayerParameters debayerParameters,
EyesisCorrectionParameters.NonlinParameters nonlinParameters,
EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
CorrectionColorProc.ColorGainsParameters channelGainParameters,
EyesisCorrectionParameters.RGBParameters rgbParameters,
int convolveFFTSize, // 128 - fft size, kernel size should be size/2
double [] scaleExposures, // probably not needed here
final int threadsMax, // maximal number of threads to launch
final boolean updateStatus,
final int debugLevel){
boolean advanced=this.correctionsParameters.zcorrect || this.correctionsParameters.equirectangular;
// boolean crop= advanced? true: this.correctionsParameters.crop;
boolean rotate= advanced? false: this.correctionsParameters.rotate;
double JPEG_scale= advanced? 1.0: this.correctionsParameters.JPEG_scale;
boolean toRGB= advanced? true: this.correctionsParameters.toRGB;
showDoubleFloatArrays sdfa_instance = new showDoubleFloatArrays(); // just for debugging?
// may use this.StartTime to report intermediate steps execution times
String name=(String) imp_quad[0].getProperty("name");
// int channel= Integer.parseInt((String) imp_src.getProperty("channel"));
int channel= (Integer) imp_quad[0].getProperty("channel");
String path= (String) imp_quad[0].getProperty("path");
ImagePlus [] results = new ImagePlus[imp_quad.length];
for (int i = 0; i < results.length; i++) {
results[i] = imp_quad[i];
results[i].setTitle(results[i].getTitle()+"RAW");
}
if (debugLevel>1) System.out.println("processing: "+path);
double [][][] double_stacks = new double [imp_quad.length][][];
for (int i = 0; i < double_stacks.length; i++){
double_stacks[i] = eyesisCorrections.bayerToDoubleStack(
imp_quad[i], // source Bayer image, linearized, 32-bit (float))
null); // no margins, no oversample
}
// String [] rbg_titles = {"Red", "Blue", "Green"};
ImageStack stack;
// =================
ImageDtt image_dtt = new ImageDtt();
for (int i = 0; i < double_stacks.length; i++){
for (int j =0 ; j < double_stacks[i][0].length; j++){
double_stacks[i][2][j]*=0.5; // Scale green by 0.5 to compensate for having twice as many samples as R and B
}
}
int tilesY = imp_quad[0].getHeight()/clt_parameters.transform_size;
int tilesX = imp_quad[0].getWidth()/clt_parameters.transform_size;
double [][][][] clt_corr_combo = null;
double [][][][][] clt_corr_partial = null;
if (clt_parameters.correlate){
clt_corr_combo = new double [2][tilesY][tilesX][];
if (clt_parameters.corr_keep){
clt_corr_partial = new double [tilesY][tilesX][][][];
}
}
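// clt_corr_combo[0]/[1] hold the combined and summed per-tile correlations (shown below with the "combo"/"sum" titles);
// clt_corr_partial, allocated only when corr_keep is set, preserves the individual pair/color correlation tiles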
double [][] disparity_map = new double [8][]; // per-tile result slices (disparity estimates, strength, variety) - see disparity_titles below
double [][][][][][] clt_data = image_dtt.clt_aberrations_quad_corr(
clt_parameters.disparity, // final double disparity,
double_stacks, // final double [][][] image_data, // first index - number of image in a quad
// correlation results - final and partial
clt_corr_combo, // [tilesY][tilesX][(2*transform_size-1)*(2*transform_size-1)] // if null - will not calculate
clt_corr_partial, // [tilesY][tilesX][quad]color][(2*transform_size-1)*(2*transform_size-1)] // if null - will not calculate
disparity_map, // [2][tilesY * tilesX]
imp_quad[0].getWidth(), // final int width,
clt_parameters.fat_zero, // add to denominator to modify phase correlation (same units as data1, data2). <0 - pure sum
clt_parameters.corr_sym,
clt_parameters.corr_offset,
clt_parameters.corr_red,
clt_parameters.corr_blue,
clt_parameters.corr_sigma,
clt_parameters.corr_mask,
clt_parameters.corr_normalize, // normalize correlation results by rms
clt_parameters.corr_normalize? clt_parameters.min_corr_normalized: clt_parameters.min_corr, // 0.0001; // minimal correlation value to consider valid
clt_parameters.max_corr_sigma,// 1.5; // weights of points around global max to find fractional
clt_parameters.max_corr_radius,
geometryCorrection, // final GeometryCorrection geometryCorrection,
clt_kernels, // final double [][][][][][] clt_kernels, // [channel_in_quad][color][tileY][tileX][band][pixel] , size should match image (have 1 tile around)
clt_parameters.kernel_step,
clt_parameters.transform_size,
clt_parameters.clt_window,
clt_parameters.shift_x, // final int shiftX, // shift image horizontally (positive - right) - just for testing
clt_parameters.shift_y, // final int shiftY, // shift image vertically (positive - down)
clt_parameters.tileX, // final int debug_tileX,
clt_parameters.tileY, // final int debug_tileY,
(clt_parameters.dbg_mode & 64) != 0, // no fract shift
(clt_parameters.dbg_mode & 128) != 0, // no convolve
// (clt_parameters.dbg_mode & 256) != 0, // transpose convolve
threadsMax,
debugLevel);
System.out.println("clt_data.length="+clt_data.length+" clt_data[0].length="+clt_data[0].length
+" clt_data[0][0].length="+clt_data[0][0].length+" clt_data[0][0][0].length="+
clt_data[0][0][0].length+" clt_data[0][0][0][0].length="+clt_data[0][0][0][0].length+
" clt_data[0][0][0][0][0].length="+clt_data[0][0][0][0][0].length);
// visualize correlation results
if (clt_corr_combo!=null){
if (disparity_map != null){
if (debugLevel > -1){
String [] disparity_titles = {"int_disparity", "int_disp_ortho","cm_disparity", "cm_disp_ortho","poly_disparity", "poly_disp_ortho",
"strength", "variety"};
sdfa_instance.showArrays(
disparity_map,
tilesX,
tilesY,
true,
name+"-DISP_MAP-D"+clt_parameters.disparity,
disparity_titles);
}
}
if (debugLevel > -1){
double [][] corr_rslt = new double [clt_corr_combo.length][];
String [] titles = {"combo","sum"};
for (int i = 0; i<corr_rslt.length; i++) {
corr_rslt[i] = image_dtt.corr_dbg(
clt_corr_combo[i],
clt_parameters.corr_border_contrast,
threadsMax,
debugLevel);
}
sdfa_instance.showArrays(
corr_rslt,
tilesX*(2*clt_parameters.transform_size),
tilesY*(2*clt_parameters.transform_size),
true,
name + "-CORR-D"+clt_parameters.disparity,
titles );
}
if (debugLevel > -1){
if (clt_corr_partial!=null){
String [] allColorNames = {"red","blue","green","combo"};
String [] titles = new String[clt_corr_partial.length];
for (int i = 0; i < titles.length; i++){
titles[i]=allColorNames[i % allColorNames.length]+"_"+(i / allColorNames.length);
}
double [][] corr_rslt_partial = image_dtt.corr_partial_dbg(
clt_corr_partial,
clt_parameters.corr_border_contrast,
threadsMax,
debugLevel);
sdfa_instance.showArrays(
corr_rslt_partial,
tilesX*(2*clt_parameters.transform_size),
tilesY*(2*clt_parameters.transform_size),
true,
name+"-PART_CORR-D"+clt_parameters.disparity,
titles);
}
}
}
for (int iQuad = 0; iQuad <clt_data.length; iQuad++){
String title=name+"-"+String.format("%02d", iQuad);
String titleFull=title+"-SPLIT-D"+clt_parameters.disparity;
if (clt_parameters.corr_sigma > 0){ // otherwise - no filtering at all
for (int chn = 0; chn < clt_data[iQuad].length; chn++) {
image_dtt.clt_lpf(
clt_parameters.corr_sigma,
clt_data[iQuad][chn],
threadsMax,
debugLevel);
}
}
// int tilesY = imp_quad[iQuad].getHeight()/clt_parameters.transform_size;
// int tilesX = imp_quad[iQuad].getWidth()/clt_parameters.transform_size;
if (debugLevel > 0){
System.out.println("--tilesX="+tilesX);
System.out.println("--tilesY="+tilesY);
}
if (debugLevel > 1){
double [][] clt = new double [clt_data[iQuad].length*4][];
for (int chn = 0; chn < clt_data[iQuad].length; chn++) {
double [][] clt_set = image_dtt.clt_dbg(
clt_data [iQuad][chn],
threadsMax,
debugLevel);
for (int ii = 0; ii < clt_set.length; ii++) clt[chn*4+ii] = clt_set[ii];
}
if (debugLevel > 0){
sdfa_instance.showArrays(clt,
tilesX*clt_parameters.transform_size,
tilesY*clt_parameters.transform_size,
true,
results[iQuad].getTitle()+"-CLT-D"+clt_parameters.disparity);
}
}
double [][] iclt_data = new double [clt_data[iQuad].length][];
for (int chn=0; chn<iclt_data.length;chn++){
iclt_data[chn] = image_dtt.iclt_2d(
clt_data[iQuad][chn], // scanline representation of dcd data, organized as dct_size x dct_size tiles
clt_parameters.transform_size, // final int
clt_parameters.clt_window, // window_type
15, // clt_parameters.iclt_mask, //which of 4 to transform back
0, // clt_parameters.dbg_mode, //which of 4 to transform back
threadsMax,
debugLevel);
}
if (debugLevel > 0) sdfa_instance.showArrays(
iclt_data,
(tilesX + 0) * clt_parameters.transform_size,
(tilesY + 0) * clt_parameters.transform_size,
true,
results[iQuad].getTitle()+"-rbg_sigma");
if (debugLevel > 0) sdfa_instance.showArrays(iclt_data,
(tilesX + 0) * clt_parameters.transform_size,
(tilesY + 0) * clt_parameters.transform_size,
true,
results[iQuad].getTitle()+"-ICLT-RGB-D"+clt_parameters.disparity);
// convert to ImageStack of 3 slices
String [] sliceNames = {"red", "blue", "green"};
stack = sdfa_instance.makeStack(
iclt_data,
(tilesX + 0) * clt_parameters.transform_size,
(tilesY + 0) * clt_parameters.transform_size,
sliceNames); // or use null to get chn-nn slice names
if (debugLevel > -1){
double [] chn_avg = {0.0,0.0,0.0};
float [] pixels;
int width = stack.getWidth();
int height = stack.getHeight();
for (int c = 0; c <3; c++){
pixels = (float[]) stack.getPixels(c+1);
for (int i = 0; i<pixels.length; i++){
chn_avg[c] += pixels[i];
}
}
chn_avg[0] /= width*height;
chn_avg[1] /= width*height;
chn_avg[2] /= width*height;
System.out.println("Processed channels averages: R="+chn_avg[0]+", G="+chn_avg[2]+", B="+chn_avg[1]);
}
if (!this.correctionsParameters.colorProc){
results[iQuad]= new ImagePlus(titleFull, stack);
eyesisCorrections.saveAndShow(
results[iQuad],
this.correctionsParameters);
continue; // return results;
}
if (debugLevel > 1) System.out.println("before colors.1");
//Processing colors - changing stack sequence to r-g-b (was r-b-g)
if (!eyesisCorrections.fixSliceSequence(
stack,
debugLevel)){
if (debugLevel > -1) System.out.println("fixSliceSequence() returned false");
return null;
}
if (debugLevel > 1) System.out.println("before colors.2");
if (debugLevel > 1){
ImagePlus imp_dbg=new ImagePlus(imp_quad[iQuad].getTitle()+"-"+channel+"-preColors",stack);
eyesisCorrections.saveAndShow(
imp_dbg,
this.correctionsParameters);
}
if (debugLevel > 1) System.out.println("before colors.3, scaleExposure="+scaleExposures[iQuad]+" scale = "+(255.0/eyesisCorrections.psfSubpixelShouldBe4/eyesisCorrections.psfSubpixelShouldBe4/scaleExposures[iQuad]));
CorrectionColorProc correctionColorProc=new CorrectionColorProc(eyesisCorrections.stopRequested);
double [][] yPrPb=new double [3][];
// if (dct_parameters.color_DCT){
// need to get YPbPr - not RGB here
// } else {
correctionColorProc.processColorsWeights(stack, // just gamma convert? TODO: Cleanup? Convert directly form the linear YPrPb
// 255.0/this.psfSubpixelShouldBe4/this.psfSubpixelShouldBe4, // double scale, // initial maximal pixel value (16))
// 255.0/eyesisCorrections.psfSubpixelShouldBe4/eyesisCorrections.psfSubpixelShouldBe4/scaleExposure, // double scale, // initial maximal pixel value (16))
// 255.0/2/2/scaleExposure, // double scale, // initial maximal pixel value (16))
255.0/scaleExposures[iQuad], // double scale, // initial maximal pixel value (16))
colorProcParameters,
channelGainParameters,
channel,
null, //correctionDenoise.getDenoiseMask(),
this.correctionsParameters.blueProc,
debugLevel);
if (debugLevel > 1) System.out.println("Processed colors to YPbPr, total number of slices="+stack.getSize());
if (debugLevel > 1) {
ImagePlus imp_dbg=new ImagePlus("procColors",stack);
eyesisCorrections.saveAndShow(
imp_dbg,
this.correctionsParameters);
}
float [] fpixels;
int [] slices_YPrPb = {8,6,7};
yPrPb=new double [3][];
for (int n = 0; n < slices_YPrPb.length; n++){
fpixels = (float[]) stack.getPixels(slices_YPrPb[n]);
yPrPb[n] = new double [fpixels.length];
for (int i = 0; i < fpixels.length; i++) yPrPb[n][i] = fpixels[i];
}
if (toRGB) {
System.out.println("correctionColorProc.YPrPbToRGB");
stack = YPrPbToRGB(yPrPb,
colorProcParameters.kr, // 0.299;
colorProcParameters.kb, // 0.114;
stack.getWidth());
title=titleFull; // including "-DECONV" or "-COMBO"
titleFull=title+"-RGB-float-D"+clt_parameters.disparity;
//Trim stack to just first 3 slices
if (debugLevel > 1){ // 2){
ImagePlus imp_dbg=new ImagePlus("YPrPbToRGB",stack);
eyesisCorrections.saveAndShow(
imp_dbg,
this.correctionsParameters);
}
while (stack.getSize() > 3) stack.deleteLastSlice();
if (debugLevel > 1) System.out.println("Trimming color stack");
} else {
title=titleFull; // including "-DECONV" or "-COMBO"
titleFull=title+"-YPrPb-D"+clt_parameters.disparity; // including "-DECONV" or "-COMBO"
if (debugLevel > 1) System.out.println("Using full stack, including YPbPr");
}
results[iQuad]= new ImagePlus(titleFull, stack);
// rotate the result
if (rotate){ // never rotate for equirectangular
stack=eyesisCorrections.rotateStack32CW(stack);
}
if (!toRGB && !this.correctionsParameters.jpeg){ // toRGB set for equirectangular
if (debugLevel > 1) System.out.println("!toRGB && !this.correctionsParameters.jpeg");
eyesisCorrections.saveAndShow(results[iQuad], this.correctionsParameters);
continue; // return result;
} else { // that's not the end result, save if required
if (debugLevel > 1) System.out.println("!toRGB && !this.correctionsParameters.jpeg - else");
eyesisCorrections.saveAndShow(results[iQuad],
eyesisCorrections.correctionsParameters,
eyesisCorrections.correctionsParameters.save32,
false,
eyesisCorrections.correctionsParameters.JPEG_quality); // save, no show
}
// convert to RGB48 (16 bits per color component)
ImagePlus imp_RGB;
stack=eyesisCorrections.convertRGB32toRGB16Stack(
stack,
rgbParameters);
titleFull=title+"-RGB48-D"+clt_parameters.disparity;
results[iQuad]= new ImagePlus(titleFull, stack);
// ImagePlus imp_RGB24;
results[iQuad].updateAndDraw();
if (debugLevel > 1) System.out.println("result.updateAndDraw(), "+titleFull+"-RGB48");
CompositeImage compositeImage=eyesisCorrections.convertToComposite(results[iQuad]);
if (!this.correctionsParameters.jpeg && !advanced){ // RGB48 was the end result
if (debugLevel > 1) System.out.println("if (!this.correctionsParameters.jpeg && !advanced)");
eyesisCorrections.saveAndShow(compositeImage, this.correctionsParameters);
continue; // return result;
} else { // that's not the end result, save if required
if (debugLevel > 1) System.out.println("if (!this.correctionsParameters.jpeg && !advanced) - else");
eyesisCorrections.saveAndShow(compositeImage, this.correctionsParameters, this.correctionsParameters.save16, false); // save, no show
// eyesisCorrections.saveAndShow(compositeImage, this.correctionsParameters, this.correctionsParameters.save16, true); // save, no show
}
imp_RGB=eyesisCorrections.convertRGB48toRGB24(
stack,
title+"-RGB24-D"+clt_parameters.disparity,
0, 65536, // r range 0->0, 65536->256
0, 65536, // g range
0, 65536);// b range
if (JPEG_scale!=1.0){
ImageProcessor ip=imp_RGB.getProcessor();
ip.setInterpolationMethod(ImageProcessor.BICUBIC);
ip=ip.resize((int)(ip.getWidth()*JPEG_scale),(int) (ip.getHeight()*JPEG_scale));
imp_RGB= new ImagePlus(imp_RGB.getTitle(),ip);
imp_RGB.updateAndDraw();
}
eyesisCorrections.saveAndShow(imp_RGB, this.correctionsParameters);
}
return results;
}
......
......@@ -496,6 +496,7 @@ private Panel panel1,
addButton("CLT process files", panelClt1, color_process);
addButton("CLT process sets", panelClt1, color_process);
addButton("CLT process quads", panelClt1, color_process);
addButton("CLT process corr", panelClt1, color_process);
add(panelClt1);
}
pack();
......@@ -4022,6 +4023,7 @@ private Panel panel1,
for (int chn = 0; chn < clt_corr.length; chn++) {
corr_rslt[chn] = image_dtt.corr_dbg(
corr_tiles[chn],
CLT_PARAMETERS.corr_border_contrast,
THREADS_MAX,
DEBUG_LEVEL);
......@@ -4412,6 +4414,104 @@ private Panel panel1,
}
return;
} else if (label.equals("CLT process corr")) {
DEBUG_LEVEL=MASTER_DEBUG_LEVEL;
EYESIS_CORRECTIONS.setDebug(DEBUG_LEVEL);
if (EYESIS_DCT == null){
EYESIS_DCT = new EyesisDCT (
EYESIS_CORRECTIONS,
CORRECTION_PARAMETERS,
DCT_PARAMETERS);
if (DEBUG_LEVEL > 0){
System.out.println("Created new EyesisDCT instance, will need to read CLT kernels");
}
}
String configPath=null;
if (EYESIS_CORRECTIONS.correctionsParameters.saveSettings) {
configPath=EYESIS_CORRECTIONS.correctionsParameters.selectResultsDirectory(
true,
true);
if (configPath==null){
String msg="No results directory selected, command aborted";
System.out.println("Warning: "+msg);
IJ.showMessage("Warning",msg);
return;
}
configPath+=Prefs.getFileSeparator()+"autoconfig";
try {
saveTimestampedProperties(
configPath, // full path or null
null, // use as default directory if path==null
true,
PROPERTIES);
} catch (Exception e){
String msg="Failed to save configuration to "+configPath+", command aborted";
System.out.println("Error: "+msg);
IJ.showMessage("Error",msg);
return;
}
}
EYESIS_CORRECTIONS.initSensorFiles(DEBUG_LEVEL);
int numChannels=EYESIS_CORRECTIONS.getNumChannels();
NONLIN_PARAMETERS.modifyNumChannels(numChannels);
CHANNEL_GAINS_PARAMETERS.modifyNumChannels(numChannels);
if (!EYESIS_DCT.CLTKernelsAvailable()){
if (DEBUG_LEVEL > 0){
System.out.println("Reading CLT kernels");
}
EYESIS_DCT.readCLTKernels(
CLT_PARAMETERS,
THREADS_MAX,
UPDATE_STATUS, // update status info
DEBUG_LEVEL);
if (DEBUG_LEVEL > 1){
EYESIS_DCT.showCLTKernels(
THREADS_MAX,
UPDATE_STATUS, // update status info
DEBUG_LEVEL);
}
}
if (!EYESIS_DCT.geometryCorrectionAvailable()){
if (DEBUG_LEVEL > 0){
System.out.println("Calculating geometryCorrection");
}
if (!EYESIS_DCT.initGeometryCorrection(DEBUG_LEVEL+2)){
return;
}
}
///========================================
EYESIS_DCT.processCLTQuadCorrs(
CLT_PARAMETERS, // EyesisCorrectionParameters.DCTParameters dct_parameters,
DEBAYER_PARAMETERS, //EyesisCorrectionParameters.DebayerParameters debayerParameters,
NONLIN_PARAMETERS, //EyesisCorrectionParameters.NonlinParameters nonlinParameters,
COLOR_PROC_PARAMETERS, //EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
CHANNEL_GAINS_PARAMETERS, //CorrectionColorProc.ColorGainsParameters channelGainParameters,
RGB_PARAMETERS, //EyesisCorrectionParameters.RGBParameters rgbParameters,
EQUIRECTANGULAR_PARAMETERS, // EyesisCorrectionParameters.EquirectangularParameters equirectangularParameters,
CONVOLVE_FFT_SIZE, //int convolveFFTSize, // 128 - fft size, kernel size should be size/2
THREADS_MAX, //final int threadsMax, // maximal number of threads to launch
UPDATE_STATUS, //final boolean updateStatus,
DEBUG_LEVEL); //final int debugLevel);
if (configPath!=null) {
saveTimestampedProperties( // save config again
configPath, // full path or null
null, // use as default directory if path==null
true,
PROPERTIES);
}
return;
// End of buttons code
......
......@@ -330,8 +330,10 @@ public class GeometryCorrection {
double [] a={this.distortionC,this.distortionB,this.distortionA,this.distortionA5,this.distortionA6,this.distortionA7,this.distortionA8};
for (int i = 0; i < numSensors; i++){
// non-distorted XY of the shifted location of the individual sensor
double pXci = pXc + disparity * this.rXY[i][0]; // in pixels
double pYci = pYc + disparity * this.rXY[i][1];
// double pXci = pXc + disparity * this.rXY[i][0]; // in pixels
// double pYci = pYc + disparity * this.rXY[i][1];
double pXci = pXc - disparity * this.rXY[i][0]; // in pixels
double pYci = pYc - disparity * this.rXY[i][1];
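// note: the disparity term sign is flipped relative to the previous revision; presumably positive
// disparity now shifts each port's sampling point opposite to that port's rXY offset from the
// common center (as expected for a nearer object seen from laterally offset sub-cameras)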
// calculate back to distorted
double rNDi = Math.sqrt(pXci*pXci + pYci*pYci); // in pixels
// Rdist/R=A8*R^7+A7*R^6+A6*R^5+A5*R^4+A*R^3+B*R^2+C*R+(1-A8-A7-A6-A5-A-B-C)
......
......@@ -659,7 +659,7 @@ public class ImageDtt {
// perform 2d clt and apply aberration corrections, all colors
public double [][][][][] clt_aberrations(
final double [][] imade_data,
final double [][] image_data,
final int width,
final double [][][][][] clt_kernels, // [color][tileY][tileX][band][pixel] , size should match image (have 1 tile around)
final int kernel_step,
......@@ -675,8 +675,8 @@ public class ImageDtt {
final int threadsMax, // maximal number of threads to launch
final int globalDebugLevel)
{
final int nChn = imade_data.length;
final int height=imade_data[0].length/width;
final int nChn = image_data.length;
final int height=image_data[0].length/width;
final int tilesX=width/transform_size;
final int tilesY=height/transform_size;
final int nTilesInChn=tilesX*tilesY;
......@@ -707,7 +707,7 @@ public class ImageDtt {
centerY = tileY * transform_size + transform_size/2 - shiftY;
double [] fract_shiftXY = extract_correct_tile( // return a pair of residual offsets
imade_data,
image_data,
width, // image width
clt_kernels, // [color][tileY][tileX][band][pixel]
clt_data[chn][tileY][tileX], //double [][] clt_tile, // should be double [4][];
......@@ -759,7 +759,7 @@ public class ImageDtt {
public double [][][][][][] clt_aberrations_quad(
final double disparity,
final double [][][] imade_data, // first index - number of image in a quad
final double [][][] image_data, // first index - number of image in a quad
final int width,
final GeometryCorrection geometryCorrection,
final double [][][][][][] clt_kernels, // [channel_in_quad][color][tileY][tileX][band][pixel] , size should match image (have 1 tile around)
......@@ -776,24 +776,18 @@ public class ImageDtt {
final int threadsMax, // maximal number of threads to launch
final int globalDebugLevel)
{
final int quad = 4;
final int nChn = imade_data[0].length;
final int height=imade_data[0][0].length/width;
final int quad = 4; // number of subcameras
final int numcol = 3; // number of colors
final int nChn = image_data[0].length;
final int height=image_data[0][0].length/width;
final int tilesX=width/transform_size;
final int tilesY=height/transform_size;
final int nTilesInChn=tilesX*tilesY;
final int nTiles=tilesX*tilesY*nChn;
// final int nTiles=tilesX*tilesY*nChn;
final double [][][][][][] clt_data = new double[quad][nChn][tilesY][tilesX][4][];
final Thread[] threads = newThreadArray(threadsMax);
final AtomicInteger ai = new AtomicInteger(0);
double [][] centersXY_dbg = geometryCorrection.getPortsCoordinates(
0.0, // centerX,
0.0, // centerY,
disparity);
if (globalDebugLevel > 0) {
System.out.println("clt_aberrations_quad(): width="+width+" height="+height+" transform_size="+transform_size+
......@@ -804,84 +798,561 @@ public class ImageDtt {
public void run() {
DttRad2 dtt = new DttRad2(transform_size);
dtt.set_window(window_type);
int tileY,tileX, chn;
int tileY,tileX; // , chn;
// showDoubleFloatArrays sdfa_instance = new showDoubleFloatArrays(); // just for debugging?
double centerX; // center of aberration-corrected (common model) tile, X
double centerY; //
double [][] fract_shiftsXY = new double[quad][];
for (int nTile = ai.getAndIncrement(); nTile < nTiles; nTile = ai.getAndIncrement()) {
// for (int nTile = ai.getAndIncrement(); nTile < nTiles; nTile = ai.getAndIncrement()) {
for (int nTile = ai.getAndIncrement(); nTile < nTilesInChn; nTile = ai.getAndIncrement()) {
// TODO: process all color channels here (atomically)
chn=nTile/nTilesInChn;
tileY =(nTile % nTilesInChn)/tilesX;
// chn=nTile/nTilesInChn;
// tileY =(nTile % nTilesInChn)/tilesX;
// tileX = nTile % tilesX;
tileY = nTile /tilesX;
tileX = nTile % tilesX;
centerX = tileX * transform_size + transform_size/2 - shiftX;
centerY = tileY * transform_size + transform_size/2 - shiftY;
double [][] centersXY = geometryCorrection.getPortsCoordinates(
centerX,
centerY,
disparity);
if ((globalDebugLevel > -1) && (tileX >= debug_tileX - 2) && (tileX <= debug_tileX + 2) &&
(tileY >= debug_tileY - 2) && (tileY <= debug_tileY+2)) {
for (int i = 0; i < quad; i++) {
System.out.println("clt_aberrations_quad(): color="+chn+", tileX="+tileX+", tileY="+tileY+
" centerX="+centerX+" centerY="+centerY+" disparity="+disparity+
" centersXY["+i+"][0]="+centersXY[i][0]+" centersXY["+i+"][1]="+centersXY[i][1]);
for (int chn = 0; chn <numcol; chn++) {
centerX = tileX * transform_size + transform_size/2 - shiftX;
centerY = tileY * transform_size + transform_size/2 - shiftY;
double [][] centersXY = geometryCorrection.getPortsCoordinates(
centerX,
centerY,
disparity);
if ((globalDebugLevel > 0) && (tileX >= debug_tileX - 2) && (tileX <= debug_tileX + 2) &&
(tileY >= debug_tileY - 2) && (tileY <= debug_tileY+2)) {
for (int i = 0; i < quad; i++) {
System.out.println("clt_aberrations_quad(): color="+chn+", tileX="+tileX+", tileY="+tileY+
" centerX="+centerX+" centerY="+centerY+" disparity="+disparity+
" centersXY["+i+"][0]="+centersXY[i][0]+" centersXY["+i+"][1]="+centersXY[i][1]);
}
}
}
for (int i = 0; i < quad; i++) {
fract_shiftsXY[i] = extract_correct_tile( // return a pair of residual offsets
imade_data[i],
width, // image width
clt_kernels[i], // [color][tileY][tileX][band][pixel]
clt_data[i][chn][tileY][tileX], //double [][] clt_tile, // should be double [4][];
kernel_step,
transform_size,
dtt,
chn,
centersXY[i][0], // centerX, // center of aberration-corrected (common model) tile, X
centersXY[i][1], // centerY, //
(globalDebugLevel > 0) && (tileX == debug_tileX) && (tileY == debug_tileY) && (chn == 2), // external tile compare
no_deconvolution,
transpose);
}
if ((globalDebugLevel > 0) && (debug_tileX == tileX) && (debug_tileY == tileY) && (chn == 2)) {
showDoubleFloatArrays sdfa_instance = new showDoubleFloatArrays(); // just for debugging?
String [] titles = {"CC0","SC0","CS0","SS0","CC1","SC1","CS1","SS1","CC2","SC2","CS2","SS2","CC3","SC3","CS3","SS3"};
double [][] dbg_tile = new double [16][];
for (int i = 0; i < 16; i++) dbg_tile[i]=clt_data[i>>2][chn][tileY][tileX][i & 3];
sdfa_instance.showArrays(dbg_tile, transform_size, transform_size, true, "pre-shifted_x"+tileX+"_y"+tileY, titles);
}
if ((globalDebugLevel > -1) && (tileX >= debug_tileX - 2) && (tileX <= debug_tileX + 2) &&
(tileY >= debug_tileY - 2) && (tileY <= debug_tileY+2)) {
for (int i = 0; i < quad; i++) {
System.out.println("clt_aberrations_quad(): color="+chn+", tileX="+tileX+", tileY="+tileY+
" fract_shiftsXY["+i+"][0]="+fract_shiftsXY[i][0]+" fract_shiftsXY["+i+"][1]="+fract_shiftsXY[i][1]);
fract_shiftsXY[i] = extract_correct_tile( // return a pair of residual offsets
image_data[i],
width, // image width
clt_kernels[i], // [color][tileY][tileX][band][pixel]
clt_data[i][chn][tileY][tileX], //double [][] clt_tile, // should be double [4][];
kernel_step,
transform_size,
dtt,
chn,
centersXY[i][0], // centerX, // center of aberration-corrected (common model) tile, X
centersXY[i][1], // centerY, //
(globalDebugLevel > 0) && (tileX == debug_tileX) && (tileY == debug_tileY) && (chn == 2), // external tile compare
no_deconvolution,
transpose);
}
if ((globalDebugLevel > 0) && (debug_tileX == tileX) && (debug_tileY == tileY) && (chn == 2)) {
showDoubleFloatArrays sdfa_instance = new showDoubleFloatArrays(); // just for debugging?
String [] titles = {"CC0","SC0","CS0","SS0","CC1","SC1","CS1","SS1","CC2","SC2","CS2","SS2","CC3","SC3","CS3","SS3"};
double [][] dbg_tile = new double [16][];
for (int i = 0; i < 16; i++) dbg_tile[i]=clt_data[i>>2][chn][tileY][tileX][i & 3];
sdfa_instance.showArrays(dbg_tile, transform_size, transform_size, true, "pre-shifted_x"+tileX+"_y"+tileY, titles);
}
if ((globalDebugLevel > 0) && (tileX >= debug_tileX - 2) && (tileX <= debug_tileX + 2) &&
(tileY >= debug_tileY - 2) && (tileY <= debug_tileY+2)) {
for (int i = 0; i < quad; i++) {
System.out.println("clt_aberrations_quad(): color="+chn+", tileX="+tileX+", tileY="+tileY+
" fract_shiftsXY["+i+"][0]="+fract_shiftsXY[i][0]+" fract_shiftsXY["+i+"][1]="+fract_shiftsXY[i][1]);
}
}
if (!no_fract_shift) {
// apply residual shift
for (int i = 0; i < quad; i++) {
fract_shift( // fractional shift in transform domain. Currently uses sin/cos - change to tables with 2? rotations
clt_data[i][chn][tileY][tileX], // double [][] clt_tile,
transform_size,
fract_shiftsXY[i][0], // double shiftX,
fract_shiftsXY[i][1], // double shiftY,
// (globalDebugLevel > 0) && (tileX == debug_tileX) && (tileY == debug_tileY)); // external tile compare
((globalDebugLevel > 0) && (chn==0) && (tileX >= debug_tileX - 2) && (tileX <= debug_tileX + 2) &&
(tileY >= debug_tileY - 2) && (tileY <= debug_tileY+2)));
}
if ((globalDebugLevel > 0) && (debug_tileX == tileX) && (debug_tileY == tileY)) {
showDoubleFloatArrays sdfa_instance = new showDoubleFloatArrays(); // just for debugging?
String [] titles = {"CC0","SC0","CS0","SS0","CC1","SC1","CS1","SS1","CC2","SC2","CS2","SS2","CC3","SC3","CS3","SS3"};
double [][] dbg_tile = new double [16][];
for (int i = 0; i < 16; i++) dbg_tile[i]=clt_data[i>>2][chn][tileY][tileX][i & 3];
sdfa_instance.showArrays(dbg_tile, transform_size, transform_size, true, "shifted_x"+tileX+"_y"+tileY, titles);
}
}
}
if (!no_fract_shift) {
// apply residual shift
// all color channels are done here
}
}
};
}
startAndJoin(threads);
return clt_data;
}
public double [][][][][][] clt_aberrations_quad_corr(
final double disparity,
final double [][][] image_data, // first index - number of image in a quad
// correlation results - final and partial
final double [][][][] clt_corr_combo, // [type][tilesY][tilesX][(2*transform_size-1)*(2*transform_size-1)] // if null - will not calculate
// [type][tilesY][tilesX] should be set by caller
// types: 0 - selected correlation (product+offset), 1 - sum
final double [][][][][] clt_corr_partial,// [tilesY][tilesX][quad][color][(2*transform_size-1)*(2*transform_size-1)] // if null - will not calculate
// [tilesY][tilesX] should be set by caller
final double [][] disparity_map, // [8][tilesY][tilesX], only [6][] is needed on input, or null - do not calculate
// last 2 - contrast, avg ("geometric average")
final int width,
final double corr_fat_zero, // add to denominator to modify phase correlation (same units as data1, data2). <0 - pure sum
final boolean corr_sym,
final double corr_offset,
final double corr_red,
final double corr_blue,
final double corr_sigma,
final int corr_mask, // which pairs to combine in the combo: 1 - top, 2 bottom, 4 - left, 8 - right
final boolean corr_normalize, // normalize correlation results by rms
final double min_corr, // 0.0001; // minimal correlation value to consider valid
final double max_corr_sigma, // = 1.5; // weights of points around global max to find fractional
final double max_corr_radius, // = 2.5;
final GeometryCorrection geometryCorrection,
final double [][][][][][] clt_kernels, // [channel_in_quad][color][tileY][tileX][band][pixel] , size should match image (have 1 tile around)
final int kernel_step,
final int transform_size,
final int window_type,
final double shiftX, // shift image horizontally (positive - right) - just for testing
final double shiftY, // shift image vertically (positive - down)
final int debug_tileX,
final int debug_tileY,
final boolean no_fract_shift,
final boolean no_deconvolution,
// final boolean transpose,
final int threadsMax, // maximal number of threads to launch
final int globalDebugLevel)
{
final int quad = 4; // number of subcameras
final int numcol = 3; // number of colors
final int nChn = image_data[0].length;
final int height=image_data[0][0].length/width;
final int tilesX=width/transform_size;
final int tilesY=height/transform_size;
final int nTilesInChn=tilesX*tilesY;
// final int nTiles=tilesX*tilesY*nChn;
final double [][][][][][] clt_data = new double[quad][nChn][tilesY][tilesX][4][];
final Thread[] threads = newThreadArray(threadsMax);
final AtomicInteger ai = new AtomicInteger(0);
final double [] col_weights= new double [numcol]; // colors are RBG
col_weights[2] = 1.0/(1.0 + corr_red + corr_blue); // green color
col_weights[0] = corr_red * col_weights[2];
col_weights[1] = corr_blue * col_weights[2];
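// col_weights are referenced to green and normalized to sum to 1.0: green gets 1/(1+corr_red+corr_blue),
// red and blue get corr_red and corr_blue times the green weight (index order here is R, B, G)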
final int corr_size = transform_size * 2 -1;
final int [][] transpose_indices = new int [corr_size*(corr_size-1)/2][2];
int indx = 0;
for (int i =0; i < corr_size-1; i++){
for (int j = i+1; j < corr_size; j++){
transpose_indices[indx ][0] = i * corr_size + j;
transpose_indices[indx++][1] = j * corr_size + i;
}
}
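// transpose_indices lists the off-diagonal index pairs of a corr_size x corr_size tile; it is used
// below to transpose the pixel-domain correlation of the vertical pairs (corr_pairs[pair][2] != 0)
// so that all pairs share the same (horizontal) disparity axis before being combined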
if (globalDebugLevel > -1) {
System.out.println("clt_aberrations_quad_corr(): width="+width+" height="+height+" transform_size="+transform_size+
" debug_tileX="+debug_tileX+" debug_tileY="+debug_tileY+" globalDebugLevel="+globalDebugLevel);
}
final int [][] zi =
{{ 0, 1, 2, 3},
{-1, 0, -3, 2},
{-2, -3, 0, 1},
{ 3, -2, -1, 0}};
final int [][] corr_pairs ={ // {first, second, rot} rot: 0 - as is, 1 - swap y,x
{0,1,0},
{2,3,0},
{0,2,1},
{1,3,1}};
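// zi encodes the transform-domain correlation of two CLT tiles: for each output band n,
// band |zi[n][k]| of the first tile multiplies band k of the second, subtracted when zi[n][k] < 0.
// corr_pairs lists the camera pairs to correlate - {0,1} top, {2,3} bottom, {0,2} left, {1,3} right
// (matching the corr_mask bits) - with the last element flagging vertical pairs to be transposed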
final int transform_len = transform_size * transform_size;
final double [] filter_direct= new double[transform_len];
if (corr_sigma == 0) {
filter_direct[0] = 1.0;
for (int i= 1; i<filter_direct.length;i++) filter_direct[i] =0;
} else {
for (int i = 0; i < transform_size; i++){
for (int j = 0; j < transform_size; j++){
filter_direct[i*transform_size+j] = Math.exp(-(i*i+j*j)/(2*corr_sigma));
}
}
}
// normalize
double sum = 0;
for (int i = 0; i < transform_size; i++){
for (int j = 0; j < transform_size; j++){
double d = filter_direct[i*transform_size+j];
d*=Math.cos(Math.PI*i/(2*transform_size))*Math.cos(Math.PI*j/(2*transform_size));
if (i > 0) d*= 2.0;
if (j > 0) d*= 2.0;
sum +=d;
}
}
for (int i = 0; i<filter_direct.length; i++){
filter_direct[i] /= sum;
}
DttRad2 dtt = new DttRad2(transform_size);
final double [] filter= dtt.dttt_iiie(filter_direct);
for (int i=0; i < filter.length;i++) filter[i] *= 2*transform_size;
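// filter is the LPF applied to correlation tiles: a Gaussian of sigma corr_sigma built in the
// pixel domain, normalized (using the transform's cosine DC weights) and converted to the
// transform domain with dttt_iiie(), then scaled by 2*transform_size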
// prepare disparity maps and weights
final int max_search_radius = (int) Math.abs(max_corr_radius); // use negative max_corr_radius for squares instead of circles?
if (globalDebugLevel > -1){
int numPairs = 0;
for (int pair = 0; pair < corr_pairs.length; pair++) if (((corr_mask >> pair) & 1) != 0){
numPairs++;
}
System.out.println("max_corr_radius= "+max_corr_radius);
System.out.println("max_search_radius="+max_search_radius);
System.out.println("corr_mask= "+corr_mask);
System.out.println("corr_fat_zero= "+corr_fat_zero);
System.out.println("numPairs= "+numPairs);
}
if (disparity_map != null){
for (int i = 0; i<disparity_map.length;i++){
disparity_map[i] = new double [tilesY*tilesX];
}
}
final double [] corr_max_weights =((max_corr_sigma > 0) && (disparity_map != null))?
setMaxXYWeights(max_corr_sigma,max_search_radius): null; // here use square anyway
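// corr_max_weights (Gaussian with sigma max_corr_sigma) is only used by the polynomial maximum
// refinement of the disparity map, so it is not allocated when no disparity map is requested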
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
DttRad2 dtt = new DttRad2(transform_size);
dtt.set_window(window_type);
int tileY,tileX; // , chn;
// showDoubleFloatArrays sdfa_instance = new showDoubleFloatArrays(); // just for debugging?
double centerX; // center of aberration-corrected (common model) tile, X
double centerY; //
double [][] fract_shiftsXY = new double[quad][];
double [][] tcorr_combo = null; // [15*15] pixel space
// double [][] tcorr_tcombo = null; // [4][8*8] - transform space not used
double [][][] tcorr_partial = null; // [quad][numcol+1][15*15]
double [][][][] tcorr_tpartial = null; // [quad][numcol+1][4][8*8]
PolynomialApproximation pa = null;
if (corr_max_weights !=null) pa = new PolynomialApproximation(0); // debug level
for (int nTile = ai.getAndIncrement(); nTile < nTilesInChn; nTile = ai.getAndIncrement()) {
tileY = nTile /tilesX;
tileX = nTile % tilesX;
for (int chn = 0; chn <numcol; chn++) {
centerX = tileX * transform_size + transform_size/2 - shiftX;
centerY = tileY * transform_size + transform_size/2 - shiftY;
double [][] centersXY = geometryCorrection.getPortsCoordinates(
centerX,
centerY,
disparity);
if ((globalDebugLevel > 0) && (tileX >= debug_tileX - 2) && (tileX <= debug_tileX + 2) &&
(tileY >= debug_tileY - 2) && (tileY <= debug_tileY+2)) {
for (int i = 0; i < quad; i++) {
System.out.println("clt_aberrations_quad(): color="+chn+", tileX="+tileX+", tileY="+tileY+
" centerX="+centerX+" centerY="+centerY+" disparity="+disparity+
" centersXY["+i+"][0]="+centersXY[i][0]+" centersXY["+i+"][1]="+centersXY[i][1]);
}
}
for (int i = 0; i < quad; i++) {
fract_shift( // fractional shift in transform domain. Currently uses sin/cos - change to tables with 2? rotations
clt_data[i][chn][tileY][tileX], // double [][] clt_tile,
fract_shiftsXY[i] = extract_correct_tile( // return a pair of residual offsets
image_data[i],
width, // image width
clt_kernels[i], // [color][tileY][tileX][band][pixel]
clt_data[i][chn][tileY][tileX], //double [][] clt_tile, // should be double [4][];
kernel_step,
transform_size,
fract_shiftsXY[i][0], // double shiftX,
fract_shiftsXY[i][1], // double shiftY,
// (globalDebugLevel > 0) && (tileX == debug_tileX) && (tileY == debug_tileY)); // external tile compare
((globalDebugLevel > 0) && (chn==0) && (tileX >= debug_tileX - 2) && (tileX <= debug_tileX + 2) &&
(tileY >= debug_tileY - 2) && (tileY <= debug_tileY+2)));
dtt,
chn,
centersXY[i][0], // centerX, // center of aberration-corrected (common model) tile, X
centersXY[i][1], // centerY, //
(globalDebugLevel > 0) && (tileX == debug_tileX) && (tileY == debug_tileY) && (chn == 2), // external tile compare
no_deconvolution,
false); // transpose);
}
if ((globalDebugLevel > 0) && (debug_tileX == tileX) && (debug_tileY == tileY)) {
if ((globalDebugLevel > 0) && (debug_tileX == tileX) && (debug_tileY == tileY) && (chn == 2)) {
showDoubleFloatArrays sdfa_instance = new showDoubleFloatArrays(); // just for debugging?
String [] titles = {"CC0","SC0","CS0","SS0","CC1","SC1","CS1","SS1","CC2","SC2","CS2","SS2","CC3","SC3","CS3","SS3"};
double [][] dbg_tile = new double [16][];
for (int i = 0; i < 16; i++) dbg_tile[i]=clt_data[i>>2][chn][tileY][tileX][i & 3];
sdfa_instance.showArrays(dbg_tile, transform_size, transform_size, true, "shifted_x"+tileX+"_y"+tileY, titles);
sdfa_instance.showArrays(dbg_tile, transform_size, transform_size, true, "pre-shifted_x"+tileX+"_y"+tileY, titles);
}
if ((globalDebugLevel > 0) && (tileX >= debug_tileX - 2) && (tileX <= debug_tileX + 2) &&
(tileY >= debug_tileY - 2) && (tileY <= debug_tileY+2)) {
for (int i = 0; i < quad; i++) {
System.out.println("clt_aberrations_quad(): color="+chn+", tileX="+tileX+", tileY="+tileY+
" fract_shiftsXY["+i+"][0]="+fract_shiftsXY[i][0]+" fract_shiftsXY["+i+"][1]="+fract_shiftsXY[i][1]);
}
}
if (!no_fract_shift) {
// apply residual shift
for (int i = 0; i < quad; i++) {
fract_shift( // fractional shift in transform domain. Currently uses sin/cos - change to tables with 2? rotations
clt_data[i][chn][tileY][tileX], // double [][] clt_tile,
transform_size,
fract_shiftsXY[i][0], // double shiftX,
fract_shiftsXY[i][1], // double shiftY,
// (globalDebugLevel > 0) && (tileX == debug_tileX) && (tileY == debug_tileY)); // external tile compare
((globalDebugLevel > 0) && (chn==0) && (tileX >= debug_tileX - 2) && (tileX <= debug_tileX + 2) &&
(tileY >= debug_tileY - 2) && (tileY <= debug_tileY+2)));
}
if ((globalDebugLevel > 0) && (debug_tileX == tileX) && (debug_tileY == tileY)) {
showDoubleFloatArrays sdfa_instance = new showDoubleFloatArrays(); // just for debugging?
String [] titles = {"CC0","SC0","CS0","SS0","CC1","SC1","CS1","SS1","CC2","SC2","CS2","SS2","CC3","SC3","CS3","SS3"};
double [][] dbg_tile = new double [16][];
for (int i = 0; i < 16; i++) dbg_tile[i]=clt_data[i>>2][chn][tileY][tileX][i & 3];
sdfa_instance.showArrays(dbg_tile, transform_size, transform_size, true, "shifted_x"+tileX+"_y"+tileY, titles);
}
}
}
// all color channels are done here
if (clt_corr_combo != null){ // not null - calculate correlations
tcorr_tpartial=new double[corr_pairs.length][numcol+1][4][transform_len];
// tcorr_tcombo = new double[quad][transform_len];
tcorr_partial = new double[quad][numcol+1][];
for (int pair = 0; pair < corr_pairs.length; pair++){
for (int chn = 0; chn <numcol; chn++){
double [][] data1 = clt_data[corr_pairs[pair][0]][chn][tileY][tileX];
double [][] data2 = clt_data[corr_pairs[pair][1]][chn][tileY][tileX];
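// transform-domain correlation of the two tiles: the zi table does the band/sign bookkeeping
// of the product, and the result is normalized by sqrt(s1*s2) + corr_fat_zero^2 - phase
// correlation softened by the "fat zero" to avoid dividing by near-zero spectral magnitudes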
for (int i = 0; i < transform_len; i++) {
double s1 = 0.0, s2=0.0;
for (int n = 0; n< 4; n++){
s1+=data1[n][i] * data1[n][i];
s2+=data2[n][i] * data2[n][i];
}
double scale = 1.0 / (Math.sqrt(s1*s2) + corr_fat_zero*corr_fat_zero); // squared to match units
for (int n = 0; n<4; n++){
tcorr_tpartial[pair][chn][n][i] = 0;
for (int k=0; k<4; k++){
if (zi[n][k] < 0)
tcorr_tpartial[pair][chn][n][i] -=
data1[-zi[n][k]][i] * data2[k][i];
else
tcorr_tpartial[pair][chn][n][i] +=
data1[zi[n][k]][i] * data2[k][i];
}
tcorr_tpartial[pair][chn][n][i] *= scale;
}
}
// got transform-domain correlation for the pair, 1 color
}
// calculate composite color
for (int i = 0; i < transform_len; i++) {
for (int n = 0; n<4; n++) {
tcorr_tpartial[pair][numcol][n][i] =
col_weights[0]* tcorr_tpartial[pair][0][n][i] +
col_weights[1]* tcorr_tpartial[pair][1][n][i] +
col_weights[2]* tcorr_tpartial[pair][2][n][i];
}
}
// now lpf (only last/composite color if do not preserve intermediate
int firstColor = (clt_corr_partial == null)? numcol : 0;
if (corr_sigma >0) {
for (int chn = firstColor; chn <= numcol; chn++){
for (int i = 0; i < transform_len; i++) {
for (int n = 0; n<4; n++) {
tcorr_tpartial[pair][chn][n][i] *= filter[i];
}
}
}
}
// convert to pixel domain - all or just composite color
for (int chn = firstColor; chn <= numcol; chn++){
for (int quadrant = 0; quadrant < 4; quadrant++){
int mode = ((quadrant << 1) & 2) | ((quadrant >> 1) & 1); // transpose
tcorr_tpartial[pair][chn][quadrant] =
dtt.dttt_iie(tcorr_tpartial[pair][chn][quadrant], mode, transform_size);
}
}
// convert from 4 quadrants to 15x15 centered tiles (each color or only composite)
for (int chn = firstColor; chn <= numcol; chn++){
tcorr_partial[pair][chn] = corr_unfold_tile(
tcorr_tpartial[pair][chn],
transform_size);
}
// transpose vertical pairs
if (corr_pairs[pair][2] != 0) {
for (int chn = firstColor; chn <= numcol; chn++){
for (int i = 0; i < transpose_indices.length; i++) {
double d = tcorr_partial[pair][chn][transpose_indices[i][0]];
tcorr_partial[pair][chn][transpose_indices[i][0]] = tcorr_partial[pair][chn][transpose_indices[i][1]];
tcorr_partial[pair][chn][transpose_indices[i][1]] = d;
//transpose_indices
}
}
}
// make symmetrical around the disparity direction (horizontal) (here using just average, not mul/sum mixture)
if (corr_sym){
for (int chn = firstColor; chn <= numcol; chn++){
for (int i = 1 ; i < transform_size; i++){
int indx1 = (transform_size - 1 - i) * corr_size;
int indx2 = (transform_size - 1 + i) * corr_size;
for (int j = 0; j< corr_size; j++){
int indx1j = indx1 + j;
int indx2j = indx2 + j;
tcorr_partial[pair][chn][indx1j] =
0.5* (tcorr_partial[pair][chn][indx1j] + tcorr_partial[pair][chn][indx2j]);
tcorr_partial[pair][chn][indx2j] = tcorr_partial[pair][chn][indx1j];
}
}
}
}
} // all pairs calculated
tcorr_combo = new double [2][corr_size * corr_size];
int numPairs = 0;
for (int pair = 0; pair < corr_pairs.length; pair++) if (((corr_mask >> pair) & 1) != 0){
numPairs++;
}
double avScale = 0.0;
if (numPairs > 0) {
boolean debugMax = (globalDebugLevel > -1) && (tileX == debug_tileX) && (tileY == debug_tileY);
avScale = 1.0/numPairs;
if (debugMax) {
System.out.println("avScale="+avScale+", corr_offset="+corr_offset);
}
if (corr_offset < 0) { // just add all partial correlations for composite color
for (int i = 0; i < tcorr_combo[0].length; i++){
tcorr_combo[0][i] = 0.0;
for (int pair = 0; pair < corr_pairs.length; pair++) if (((corr_mask >> pair) & 1) != 0){
tcorr_combo[0][i] += avScale*tcorr_partial[pair][numcol][i]; // only composite color channel
if (debugMax) {
System.out.println("tcorr_combo[0]["+i+"]="+tcorr_combo[0][i]+" tcorr_partial["+pair+"]["+numcol+"]["+i+"]="+tcorr_partial[pair][numcol][i]);
}
}
}
} else {
for (int i = 0; i < tcorr_combo[0].length; i++){
tcorr_combo[0][i] = 1.0;
for (int pair = 0; pair < corr_pairs.length; pair++) if (((corr_mask >> pair) & 1) != 0){
tcorr_combo[0][i] *= (tcorr_partial[pair][numcol][i] + corr_offset); // only composite color channel
if (debugMax) {
System.out.println("tcorr_combo[0]["+i+"]="+tcorr_combo[0][i]+" tcorr_partial["+pair+"]["+numcol+"]["+i+"]="+tcorr_partial[pair][numcol][i]);
}
/*
if (tcorr_combo[0][i] > 0){
tcorr_combo[0][i] = Math.pow(tcorr_combo[0][i],avScale) - corr_offset;
} else {
tcorr_combo[0][i] = 0; // -Math.pow(-tcorr_combo[0][i],avScale) - corr_offset;
}
*/
}
}
}
// calculate sum also
for (int i = 0; i < tcorr_combo[1].length; i++){
tcorr_combo[1][i] = 0.0;
for (int pair = 0; pair < corr_pairs.length; pair++) if (((corr_mask >> pair) & 1) != 0){
tcorr_combo[1][i] += avScale*tcorr_partial[pair][numcol][i]; // only composite color channel
if (debugMax) {
System.out.println("tcorr_combo[1]["+i+"]="+tcorr_combo[1][i]+" tcorr_partial["+pair+"]["+numcol+"]["+i+"]="+tcorr_partial[pair][numcol][i]);
}
}
}
double [] rms = new double [tcorr_combo.length];
for (int n = 0; n < rms.length; n++) rms[n] = 1.0;
if (corr_normalize){ // normalize both composite and sum by their RMS
for (int n = 0; n<tcorr_combo.length; n++){
rms[n] = 0;
for (int i = 0; i < tcorr_combo[n].length; i++){
rms[n] += tcorr_combo[n][i] * tcorr_combo[n][i];
}
rms[n] = Math.sqrt(rms[n]/tcorr_combo[n].length);
if (rms[n] > 0){
double k = 1.0/rms[n];
for (int i = 0; i < tcorr_combo[n].length; i++){
tcorr_combo[n][i] *= k;
}
}
}
}
// return results
for (int n = 0; n <clt_corr_combo.length; n++){
clt_corr_combo[n][tileY][tileX] = tcorr_combo[n];
}
if (clt_corr_partial != null){
clt_corr_partial[tileY][tileX] = tcorr_partial;
}
// calculate disparity map
if (disparity_map != null) {
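// disparity_map layout (one value per tile): [0],[1] - X/Y from the integer maximum,
// [2],[3] - "center of mass" refinement, [4],[5] - 2-nd degree polynomial refinement,
// [6] - combined correlation value at the integer maximum, [7] - ratio of the sum combo
// to the product/offset combo at that maximum (with the optional RMS normalization undone)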
int [] icorr_max =getMaxXYInt( // find integer pair or null if below threshold
tcorr_combo[0], // [data_size * data_size]
corr_size,
min_corr, // minimal value to consider (at integer location, not interpolated)
debugMax);
if (icorr_max != null){
double [] corr_max_XYi = {icorr_max[0],icorr_max[1]};
disparity_map[0][tileY*tilesX + tileX] = transform_size - 1 -corr_max_XYi[0];
disparity_map[1][tileY*tilesX + tileX] = transform_size - 1 -corr_max_XYi[1];
} else {
disparity_map[0][tileY*tilesX + tileX] = Double.NaN;
disparity_map[1][tileY*tilesX + tileX] = Double.NaN;
disparity_map[2][tileY*tilesX + tileX] = Double.NaN;
disparity_map[3][tileY*tilesX + tileX] = Double.NaN;
disparity_map[4][tileY*tilesX + tileX] = Double.NaN;
disparity_map[5][tileY*tilesX + tileX] = Double.NaN;
continue;
}
// for the integer maximum provide contrast and variety
int max_index = icorr_max[1]*corr_size + icorr_max[0];
disparity_map[6][tileY*tilesX + tileX] = tcorr_combo[0][max_index]; // correlation combo value at the integer maximum
// undo scaling caused by optional normalization
disparity_map[7][tileY*tilesX + tileX] = (rms[1]*tcorr_combo[1][max_index])/(rms[0]*tcorr_combo[0][max_index]); // correlation combo value at the integer maximum
// Calculate "center of mass" coordinates
double [] corr_max_XYm = getMaxXYCm( // get fractional center as a "center of mass" inside circle/square from the integer max
tcorr_combo[0], // [data_size * data_size]
corr_size,
icorr_max, // integer center coordinates (relative to top left)
max_corr_radius, // positive - within that distance, negative - within 2*(-radius)+1 square
debugMax);
if (corr_max_XYm != null){
disparity_map[2][tileY*tilesX + tileX] = transform_size - 1 -corr_max_XYm[0];
disparity_map[3][tileY*tilesX + tileX] = transform_size - 1 -corr_max_XYm[1];
} else {
disparity_map[2][tileY*tilesX + tileX] = Double.NaN;
disparity_map[3][tileY*tilesX + tileX] = Double.NaN;
}
// Calculate polynomial interpolated maximum coordinates
double [] corr_max_XY = getMaxXYPoly( // get interpolated maximum coordinates using 2-nd degree polynomial
pa,
tcorr_combo[0], // [data_size * data_size]
corr_size,
icorr_max, // integer center coordinates (relative to top left)
corr_max_weights, // [(radius+1) * (radius+1)]
max_search_radius,
debugMax);
if (corr_max_XY != null){
disparity_map[4][tileY*tilesX + tileX] = transform_size - 1 -corr_max_XY[0];
disparity_map[5][tileY*tilesX + tileX] = transform_size - 1 -corr_max_XY[1];
} else {
disparity_map[4][tileY*tilesX + tileX] = Double.NaN;
disparity_map[5][tileY*tilesX + tileX] = Double.NaN;
}
}
}
} // end of if (clt_corr_combo != null)
}
}
};
......@@ -889,10 +1360,145 @@ public class ImageDtt {
startAndJoin(threads);
return clt_data;
}
// return weights for positive x,y, [(radius+1)*(radius+1)]
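// only one quadrant of weights is stored (the kernel is symmetric around the maximum);
// getMaxXYPoly() below indexes it by the absolute offsets |dx|, |dy|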
public double [] setMaxXYWeights(
double sigma,
int radius){ // ==3.0, ignore data outside sigma * nSigma
//
double [] weights = new double [(radius + 1)*(radius + 1)];
int indx = 0;
for (int i = 0; i <= radius; i ++){
for (int j = 0; j <= radius; j ++){
weights[indx++] = Math.exp(-(i*i+j*j)/(2*sigma));
}
}
return weights;
}
// find interpolated location of maximum, return {x,y} or null (if too low or non-existing)
public int [] getMaxXYInt( // find integer pair or null if below threshold
double [] data, // [data_size * data_size]
int data_size,
double minMax, // minimal value to consider (at integer location, not interpolated)
boolean debug)
{
int imx = 0;
for (int i = 1; i < data.length; i++){
if (data[imx] < data[i]){
imx = i;
}
}
if (data[imx] < minMax){
if (debug){
System.out.println("getMaxXYInt() -> null (data["+imx+"] = "+data[imx]+" < "+minMax+")");
}
return null;
}
int [] rslt = {imx % data_size, imx / data_size};
if (debug){
System.out.println("getMaxXYInt() -> "+rslt[0]+"/"+rslt[1]);
}
return rslt;
}
public double [] getMaxXYCm( // get fractional center as a "center of mass" inside circle/square from the integer max
double [] data, // [data_size * data_size]
int data_size,
int [] icenter, // integer center coordinates (relative to top left)
double radius, // positive - within that distance, negative - within 2*(-radius)+1 square
boolean debug)
{
if (icenter == null) return null; //gigo
//calculate as "center of mass"
int iradius = (int) Math.abs(radius);
int ir2 = (int) (radius*radius);
boolean square = radius <0;
double s0 = 0, sx=0,sy = 0;
for (int y = - iradius ; y <= iradius; y++){
int dataY = icenter[1] +y;
if ((dataY >= 0) && (dataY < data_size)){
int y2 = y*y;
for (int x = - iradius ; x <= iradius; x++){
int dataX = icenter[0] +x;
if ((dataX >= 0) && (dataX < data_size) && (square || ((y2 + x * x) <= ir2))){
double d = data[dataY * data_size + dataX];
s0 += d;
sx += d * dataX;
sy += d * dataY;
}
}
}
}
double [] rslt = {sx / s0, sy / s0};
if (debug){
System.out.println("getMaxXYCm() -> "+rslt[0]+"/"+rslt[1]);
}
return rslt;
}
public double [] getMaxXYPoly( // get interpolated maximum coordinates using 2-nd degree polynomial
PolynomialApproximation pa,
double [] data, // [data_size * data_size]
int data_size,
int [] icenter, // integer center coordinates (relative to top left)
double [] weights, // [(radius+1) * (radius+1)]
int radius,
boolean debug)
{
// TODO: make sure it is within a 1px x 1px square from the integer maximum? If not - return null and use center of mass instead?
if (pa == null) pa = new PolynomialApproximation();
if (icenter == null) return null; //gigo
double [][] zdata = {{0.0,0.0},{0.0},{0.0}};
// radius = 1;
double [][][] mdata = new double[(2 * radius + 1) * (2 * radius + 1)][3][];
int indx = 0;
for (int y = - radius ; y <= radius; y++){
int dataY = icenter[1] +y;
if ((dataY >= 0) && (dataY < data_size)){
int ay = (y >= 0)?y:-y;
for (int x = - radius ; x <= radius; x++){
int dataX = icenter[0] +x;
if ((dataX >= 0) && (dataX < data_size)){
int ax = (x >= 0) ? x: -x;
mdata[indx][0] = new double [2];
mdata[indx][0][0] = dataX;
mdata[indx][0][1] = dataY;
mdata[indx][1] = new double [1];
mdata[indx][1][0] = data[dataY * data_size + dataX];
mdata[indx][2] = new double [1];
mdata[indx][2][0] = weights[ay * (radius + 1) + ax];
indx++;
}
}
}
}
for (;indx < mdata.length; indx++){
mdata[indx] = zdata;
}
if (debug){
System.out.println("before: getMaxXYPoly(): icenter[0] = "+icenter[0]+" icenter[1] = "+icenter[1]);
for (int i = 0; i< mdata.length; i++){
System.out.println(i+": "+mdata[i][0][0]+"/"+mdata[i][0][1]+" z="+mdata[i][1][0]+" w="+mdata[i][2][0]);
}
}
double [] rslt = pa.quadraticMax2d(
mdata,
1.0E-30,//25, // 1.0E-15,
debug? 4:0);
if (debug){
System.out.println("after: getMaxXYPoly(): icenter[0] = "+icenter[0]+" icenter[1] = "+icenter[1]);
for (int i = 0; i< mdata.length; i++){
System.out.println(i+": "+mdata[i][0][0]+"/"+mdata[i][0][1]+" z="+mdata[i][1][0]+" w="+mdata[i][2][0]);
}
System.out.println("quadraticMax2d(mdata) --> "+((rslt==null)?"null":(rslt[0]+"/"+rslt[1])));
}
return rslt;
}
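// Illustrative only (hypothetical variable names, not a caller in this commit): the three
// estimators above are intended to be chained on one combined correlation tile, e.g. with
// transform_size = 8 (corr_size = 15):
//   int []    imax = getMaxXYInt (corr_tile, 15, min_corr, false);          // integer maximum or null
//   double [] cm   = getMaxXYCm  (corr_tile, 15, imax, 2.5, false);         // "center of mass" refinement
//   double [] w    = setMaxXYWeights(1.5, 2);                               // Gaussian weights for radius 2
//   double [] poly = getMaxXYPoly(null, corr_tile, 15, imax, w, 2, false);  // polynomial refinement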
// perform 2d clt, result is [tileY][tileX][cc_sc_cs_ss][index_in_tile]
......@@ -1005,7 +1611,7 @@ public class ImageDtt {
final int width= tilesX * dct_size;
final int height= tilesY * dct_size;
final double debug_scale = 1.0 /((debug_mask & 1) + ((debug_mask >> 1) & 1) + ((debug_mask >> 2) & 1) + ((debug_mask >> 3) & 1));
if (globalDebugLevel > -1) {
if (globalDebugLevel > 0) {
System.out.println("iclt_2d():tilesX= "+tilesX);
System.out.println("iclt_2d():tilesY= "+tilesY);
System.out.println("iclt_2d():width= "+width);
......@@ -1452,9 +2058,59 @@ public class ImageDtt {
return rslt;
}
private double [] corr_unfold_tile(
double [][] qdata, // [4][transform_size*transform_size] data after DCT2 (pixel domain)
int transform_size
)
{
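// unfolds the four quadrant arrays (already converted back to the pixel domain) into a single
// (2*transform_size-1) x (2*transform_size-1) tile centered at zero shift; the +/- combinations
// implement the mirror symmetries of each quadrant and corr_pixscale = 0.25 compensates for the
// four-fold accumulation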
int corr_pixsize = transform_size * 2 - 1;
double corr_pixscale = 0.25;
double [] rslt = new double [corr_pixsize*corr_pixsize];
rslt[corr_pixsize*transform_size - transform_size] = corr_pixscale * qdata[0][0]; // center
for (int j = 1; j < transform_size; j++) { // for i == 0
rslt[corr_pixsize*transform_size - transform_size + j] = corr_pixscale * (qdata[0][j] + qdata[1][j-1]);
rslt[corr_pixsize*transform_size - transform_size - j] = corr_pixscale * (qdata[0][j] - qdata[1][j-1]);
}
for (int i = 1; i < transform_size; i++) {
rslt[corr_pixsize*(transform_size + i) - transform_size] =
corr_pixscale * (qdata[0][i*transform_size] + qdata[2][(i-1)*transform_size]);
rslt[corr_pixsize*(transform_size - i) - transform_size] =
corr_pixscale * (qdata[0][i*transform_size] - qdata[2][(i-1)*transform_size]);
for (int j = 1; j < transform_size; j++) {
rslt[corr_pixsize*(transform_size + i) - transform_size + j] =
corr_pixscale * (qdata[0][i* transform_size + j] +
qdata[1][i* transform_size + j - 1] +
qdata[2][(i-1)*transform_size + j] +
qdata[3][(i-1)*transform_size + j - 1]);
rslt[corr_pixsize*(transform_size + i) - transform_size - j] =
corr_pixscale * ( qdata[0][i* transform_size + j] +
-qdata[1][i* transform_size + j - 1] +
qdata[2][(i-1)*transform_size + j] +
-qdata[3][(i-1)*transform_size + j - 1]);
rslt[corr_pixsize*(transform_size - i) - transform_size + j] =
corr_pixscale * (qdata[0][i* transform_size + j] +
qdata[1][i* transform_size + j - 1] +
-qdata[2][(i-1)*transform_size + j] +
-qdata[3][(i-1)*transform_size + j - 1]);
rslt[corr_pixsize*(transform_size - i) - transform_size - j] =
corr_pixscale * (qdata[0][i* transform_size + j] +
-qdata[1][i* transform_size + j - 1] +
-qdata[2][(i-1)*transform_size + j] +
qdata[3][(i-1)*transform_size + j - 1]);
}
}
return rslt;
}
// extract correlation result in linescan order (for visualization)
public double [] corr_dbg(
final double [][][] corr_data,
final double border_contrast,
final int threadsMax, // maximal number of threads to launch
final int globalDebugLevel)
{
......@@ -1483,10 +2139,10 @@ public class ImageDtt {
tileX = nTile - tileY * tilesX;
for (int i = 0; i < corr_size;i++){
System.arraycopy(corr_data[tileY][tileX], corr_size* i, corr_data_out, ((tileY*tile_size + i) *tilesX + tileX)*tile_size , corr_size);
corr_data_out[((tileY*tile_size + i) *tilesX + tileX)*tile_size+corr_size] = (i & 1) - 0.5;
corr_data_out[((tileY*tile_size + i) *tilesX + tileX)*tile_size+corr_size] = border_contrast*((i & 1) - 0.5);
}
for (int i = 0; i < tile_size; i++){
corr_data_out[((tileY*tile_size + corr_size) *tilesX + tileX)*tile_size+i] = (i & 1) - 0.5;
corr_data_out[((tileY*tile_size + corr_size) *tilesX + tileX)*tile_size+i] = border_contrast*((i & 1) - 0.5);
}
}
}
......@@ -1496,6 +2152,72 @@ public class ImageDtt {
return corr_data_out;
}
// extract correlation result in linescan order (for visualization)
public double [][] corr_partial_dbg(
final double [][][][][] corr_data,
final double border_contrast,
final int threadsMax, // maximal number of threads to launch
final int globalDebugLevel)
{
final int tilesY=corr_data.length;
final int tilesX=corr_data[0].length;
final int pairs= corr_data[0][0].length;
final int colors=corr_data[0][0][0].length;
final int nTiles=tilesX*tilesY;
final int corr_size = (int) Math.round(Math.sqrt(corr_data[0][0][0][0].length));
final int tile_size = corr_size+1;
final int corr_len = corr_size*corr_size;
System.out.println("corr_partial_dbg(): tilesY="+tilesY+", tilesX="+tilesX+", corr_size="+corr_size+", corr_len="+corr_len+
" pairs="+pairs +" colors = "+colors+" tile_size="+tile_size);
final double [][] corr_data_out = new double[pairs*colors][tilesY*tilesX*tile_size*tile_size];
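// output: one full-size mosaic per (pair, color) combination, indexed pair*colors + nColor;
// tiles are padded to tile_size = corr_size+1 and separated by dashed borders of amplitude
// border_contrast (same layout as corr_dbg() above, replicated per pair and color)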
// final String [] colorNames = {"red","blue","green","composite"};
final Thread[] threads = newThreadArray(threadsMax);
final AtomicInteger ai = new AtomicInteger(0);
for (int pair = 0; pair< pairs; pair++) {
for (int nColor = 0; nColor < colors; nColor++) {
for (int i=0; i<corr_data_out[pair*colors+nColor].length;i++) corr_data_out[pair*colors+nColor][i]= 0;
}
}
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
int tileY,tileX;
for (int nTile = ai.getAndIncrement(); nTile < nTiles; nTile = ai.getAndIncrement()) {
tileY = nTile/tilesX;
tileX = nTile - tileY * tilesX;
for (int pair = 0; pair< pairs; pair++) {
for (int nColor = 0; nColor < colors; nColor++) {
int indx = pair*colors+nColor;
for (int i = 0; i < corr_size;i++){
System.arraycopy(
corr_data[tileY][tileX][pair][nColor],
corr_size* i,
corr_data_out[indx],
((tileY*tile_size + i) *tilesX + tileX)*tile_size ,
corr_size);
corr_data_out[indx][((tileY*tile_size + i) *tilesX + tileX)*tile_size+corr_size] = border_contrast*((i & 1) - 0.5);
}
for (int i = 0; i < tile_size; i++){
corr_data_out[indx][((tileY*tile_size + corr_size) *tilesX + tileX)*tile_size+i] = border_contrast*((i & 1) - 0.5);
}
}
}
}
}
};
}
startAndJoin(threads);
return corr_data_out;
}
public double [][][][][] cltStack(
......@@ -1810,7 +2532,7 @@ public class ImageDtt {
// kernel [0][0] is centered at (-kernel_step/2,-kernel_step/2)
public double [] extract_correct_tile( // return a pair of residual offsets
double [][] imade_data,
double [][] image_data,
int width, // image width
double [][][][][] clt_kernels, // [color][tileY][tileX][band][pixel]
double [][] clt_tile, // should be double [4][];
......@@ -1825,7 +2547,7 @@ public class ImageDtt {
boolean dbg_transpose)
{
double [] residual_shift = new double[2];
int height = imade_data[0].length/width;
int height = image_data[0].length/width;
int transform_size2 = 2* transform_size;
// if (dtt == null) dtt = new DttRad2(transform_size); should have window set up
double [] tile_in = new double [4*transform_size*transform_size];
......@@ -1854,7 +2576,7 @@ public class ImageDtt {
if ((ctile_left >= 0) && (ctile_left < (width - transform_size2)) &&
(ctile_top >= 0) && (ctile_top < (height - transform_size2))) {
for (int i = 0; i < transform_size2; i++){
System.arraycopy(imade_data[chn], (ctile_top + i) * width + ctile_left, tile_in, transform_size2 * i, transform_size2);
System.arraycopy(image_data[chn], (ctile_top + i) * width + ctile_left, tile_in, transform_size2 * i, transform_size2);
}
} else { // copy by 1
for (int i = 0; i < transform_size2; i++){
......@@ -1865,7 +2587,7 @@ public class ImageDtt {
int pj = ctile_left + j;
if (pj < 0) pj = 0;
else if (pj >= width) pj = width - 1;
tile_in[transform_size2 * i + j] = imade_data[chn][pi * width + pj];
tile_in[transform_size2 * i + j] = image_data[chn][pi * width + pj];
}
}
}
......
......@@ -163,11 +163,17 @@ public class PolynomialApproximation {
}
public double[] quadraticMax2d (double [][][] data){
return quadraticMax2d (data,1.0E-15);
}
public double[] quadraticMax2d (double [][][] data,double thresholdQuad){
double [][] coeff=quadraticApproximation(data, false);
public double[] quadraticMax2d (double [][][] data, double thresholdQuad){
return quadraticMax2d (data,thresholdQuad, debugLevel);
}
public double[] quadraticMax2d (double [][][] data,double thresholdQuad, int debugLevel){
double [][] coeff=quadraticApproximation(data, false, 1.0E-20, thresholdQuad,debugLevel);
if (coeff==null) return null;
if (coeff[0].length<6) return null;
double [][] aM={
......@@ -203,28 +209,55 @@ public class PolynomialApproximation {
* returns null if not enough data even for the linear approximation
*/
public double [][] quadraticApproximation(
double [][][] data,
boolean forceLinear // use linear approximation
boolean forceLinear) // use linear approximation
{
return quadraticApproximation(
data,
forceLinear, // use linear approximation
1.0E-10, // threshold ratio of matrix determinant to norm for linear approximation (det too low - fail) 11.0E-10 failed where it shouldn't?
1.0E-15, // threshold ratio of matrix determinant to norm for quadratic approximation (det too low - fail)
this.debugLevel);
}
public double [][] quadraticApproximation(
double [][][] data,
boolean forceLinear, // use linear approximation
int debugLevel
){
return quadraticApproximation(
data,
forceLinear, // use linear approximation
1.0E-10, // threshold ratio of matrix determinant to norm for linear approximation (det too low - fail) 11.0E-10 failed where it shouldn't?
1.0E-15); // threshold ratio of matrix determinant to norm for quadratic approximation (det too low - fail)
1.0E-15, // threshold ratio of matrix determinant to norm for quadratic approximation (det too low - fail)
debugLevel);
/*
1.0E-12, // threshold ratio of matrix determinant to norm for linear approximation (det too low - fail) 11.0E-10 failed where it shouldn't?
1.0E-20); // threshold ratio of matrix determinant to norm for quadratic approximation (det too low - fail)
*/
}
public double [][] quadraticApproximation(
public double [][] quadraticApproximation(
double [][][] data,
boolean forceLinear, // use linear approximation
double thresholdLin, // threshold ratio of matrix determinant to norm for linear approximation (det too low - fail)
double thresholdQuad) // threshold ratio of matrix determinant to norm for quadratic approximation (det too low - fail)
{
return quadraticApproximation(
data,
forceLinear, // use linear approximation
1.0E-10, // threshold ratio of matrix determinant to norm for linear approximation (det too low - fail) 11.0E-10 failed where it shouldn't?
1.0E-15, // threshold ratio of matrix determinant to norm for quadratic approximation (det too low - fail)
this.debugLevel);
}
public double [][] quadraticApproximation(
double [][][] data,
boolean forceLinear, // use linear approximation
double thresholdLin, // threshold ratio of matrix determinant to norm for linear approximation (det too low - fail)
double thresholdQuad // threshold ratio of matrix determinant to norm for quadratic approximation (det too low - fail)
double thresholdQuad, // threshold ratio of matrix determinant to norm for quadratic approximation (det too low - fail)
int debugLevel
){
if (this.debugLevel>3) System.out.println("quadraticApproximation(...), debugLevel="+this.debugLevel+":");
if (debugLevel>3) System.out.println("quadraticApproximation(...), debugLevel="+debugLevel+":");
/* ix, iy - the location of the point with maximal value. We'll approximate the vicinity of that maximum using a
* second degree polynomial:
Z(x,y)~=A*x^2+B*y^2+C*x*y+D*x+E*y+F
......@@ -365,7 +398,7 @@ public class PolynomialApproximation {
{S10,S01,S00}};
Matrix M=new Matrix (mAarrayL);
Matrix Z;
if (this.debugLevel>3) System.out.println(">>> n="+n+" det_lin="+M.det()+" norm_lin="+normMatix(mAarrayL));
if (debugLevel>3) System.out.println(">>> n="+n+" det_lin="+M.det()+" norm_lin="+normMatix(mAarrayL));
double nmL=normMatix(mAarrayL);
if ((nmL==0.0) || (Math.abs(M.det())/nmL<thresholdLin)){
// return average value for each channel
......@@ -398,10 +431,17 @@ public class PolynomialApproximation {
{S21,S03,S12,S11,S02,S01},
{S20,S02,S11,S10,S01,S00}};
M=new Matrix (mAarrayQ);
if (debugLevel>3) System.out.println(" n="+n+" det_quad="+M.det()+" norm_quad="+normMatix(mAarrayQ)+" data.length="+data.length);
if (debugLevel>3) {
System.out.println(" n="+n+" det_quad="+M.det()+" norm_quad="+normMatix(mAarrayQ)+" data.length="+data.length);
M.print(10,5);
}
double nmQ=normMatix(mAarrayQ);
if ((nmQ==0.0) || (Math.abs(M.det())/normMatix(mAarrayQ)<thresholdQuad)) {
if (debugLevel>0) System.out.println("Using linear approximation, M.det()="+M.det()+" normMatix(mAarrayQ)="+normMatix(mAarrayQ)); //did not happen
if (debugLevel>0) System.out.println("Using linear approximation, M.det()="+M.det()+
" normMatix(mAarrayQ)="+normMatix(mAarrayQ)+
", thresholdQuad="+thresholdQuad+
", nmQ="+nmQ+
", Math.abs(M.det())/normMatix(mAarrayQ)="+(Math.abs(M.det())/normMatix(mAarrayQ))); //did not happen
return ABCDEF; // not enough data for the quadratic approximation, return linear
}
// double [] zAarrayQ={SZ20,SZ02,SZ11,SZ10,SZ01,SZ00};
......