Commit f19a4557 authored by Andrey Filippov

Adding support for a dual-quad camera for CNN ground truth

parent 6db01aca
@@ -521,6 +521,7 @@ G= Y +Pr*(- 2*Kr*(1-Kr))/Kg + Pb*(-2*Kb*(1-Kb))/Kg
public static class ColorGainsParameters {
public static final String AUX_PREFIX = "AUX-";
public double[] gain={
1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,
1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,
@@ -548,7 +549,7 @@ G= Y +Pr*(- 2*Kr*(1-Kr))/Kg + Pb*(-2*Kb*(1-Kb))/Kg
properties.setProperty(prefix+"balanceBlue_"+i,this.balanceBlue[i]+"");
}
}
public boolean getProperties(String prefix,Properties properties){
if (properties.getProperty(prefix+"channels")!=null) {
int numChannels=Integer.parseInt(properties.getProperty(prefix+"channels"));
this.gain= new double[numChannels];
@@ -559,7 +560,9 @@ G= Y +Pr*(- 2*Kr*(1-Kr))/Kg + Pb*(-2*Kb*(1-Kb))/Kg
this.balanceRed[i]= Double.parseDouble(properties.getProperty(prefix+"balanceRed_"+i));
this.balanceBlue[i]=Double.parseDouble(properties.getProperty(prefix+"balanceBlue_"+i));
}
return true;
}
return false;
}
public void modifyNumChannels(int numChannels){
@@ -599,6 +602,43 @@ G= Y +Pr*(- 2*Kr*(1-Kr))/Kg + Pb*(-2*Kb*(1-Kb))/Kg
return true;
}
public boolean showDialog(ColorGainsParameters aux) {
GenericJTabbedDialog gd = new GenericJTabbedDialog("Individual channels colors/gains", 600,1000);
if (aux != null) gd.addTab("Main camera");
for (int i =0; i<this.gain.length;i++){
gd.addMessage(String.format("=== CHANNEL %02d ===",i));
gd.addNumericField(String.format("%02d: Gain (brightness)",i), this.gain[i], 3);
gd.addNumericField(String.format("%02d: Balance Red/Green",i), this.balanceRed[i], 3);
gd.addNumericField(String.format("%02d: Balance Blue/Green",i), this.balanceBlue[i], 3);
}
if (aux != null) {
gd.addTab("Auxiliary camera");
for (int i =0; i<this.gain.length;i++){
gd.addMessage(String.format("=== CHANNEL %02d ===",i));
gd.addNumericField(String.format("%02d: Gain (brightness)",i), aux.gain[i], 3);
gd.addNumericField(String.format("%02d: Balance Red/Green",i), aux.balanceRed[i], 3);
gd.addNumericField(String.format("%02d: Balance Blue/Green",i), aux.balanceBlue[i], 3);
}
}
gd.showDialog();
if (gd.wasCanceled()) return false;
for (int i =0; i<this.gain.length;i++){
this.gain[i]= gd.getNextNumber();
this.balanceRed[i]= gd.getNextNumber();
this.balanceBlue[i]=gd.getNextNumber();
}
if (aux != null) {
for (int i =0; i<this.gain.length;i++){
aux.gain[i]= gd.getNextNumber();
aux.balanceRed[i]= gd.getNextNumber();
aux.balanceBlue[i]=gd.getNextNumber();
}
}
return true;
}
}
...
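The getProperties() change above now reports whether any saved per-channel settings were found, so a caller can fall back to defaults when a configuration (for example a freshly added auxiliary camera) has never been saved. A minimal, self-contained sketch of that pattern; the class, prefix and key names are illustrative assumptions, not the actual plugin code:

import java.util.Properties;

// Illustrative sketch: a loader that returns false lets the caller keep/seed defaults.
public class GainsLoadSketch {
    double[] gain = {1.0, 1.0, 1.0, 1.0}; // default per-channel gains

    boolean getProperties(String prefix, Properties properties) {
        if (properties.getProperty(prefix + "channels") != null) {
            int numChannels = Integer.parseInt(properties.getProperty(prefix + "channels"));
            gain = new double[numChannels];
            for (int i = 0; i < numChannels; i++) {
                gain[i] = Double.parseDouble(properties.getProperty(prefix + "gain_" + i));
            }
            return true;  // saved settings were found and restored
        }
        return false;     // nothing saved under this prefix
    }

    public static void main(String[] args) {
        GainsLoadSketch aux = new GainsLoadSketch();
        if (!aux.getProperties("AUX-COLOR_GAINS.", new Properties())) {
            System.out.println("No saved aux color gains - keeping defaults");
        }
    }
}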
@@ -37,6 +37,7 @@ import ij.gui.GenericDialog;
public class EyesisCorrectionParameters {
public static class CorrectionParameters{
public static final String AUX_PREFIX = "AUX-";
public boolean swapSubchannels01= true; // false; // (false: 0-1-2, true - 1-0-2)
public boolean split= true;
public boolean vignetting= true;
@@ -75,46 +76,46 @@ public class EyesisCorrectionParameters {
public boolean zcorrect= true;
public boolean saveSettings = true;
public String [] sourcePaths= {};
public String sourceDirectory= "";
public String sourcePrefix= "";
public String sourceSuffix= ".tiff"; //".jp4"
public int firstSubCamera= 1; // channel index in source file names
public int numSubCameras= 4; // channel index in source file names
public String sensorDirectory= "";
public String sensorPrefix= "sensor-";
public String sensorSuffix= ".calib-tiff"; // fixed in PixelMapping
public String sharpKernelDirectory= "";
public String sharpKernelPrefix= "sharpKernel-";
public String sharpKernelSuffix= ".kernel-tiff";
public String smoothKernelDirectory= "";
public String smoothKernelPrefix= "smoothKernel-";
public String smoothKernelSuffix= ".kernel-tiff";
public String dctKernelDirectory= "";
public String dctKernelPrefix= "dct-";
public String dctSymSuffix= ".sym-tiff";
public String dctAsymSuffix= ".asym-tiff";
public String equirectangularDirectory="";
public String equirectangularPrefix= "";
public String equirectangularSuffix= ".eqr-tiff";
public boolean equirectangularCut= true;
public String planeMapPrefix= "";
public String planeMapSuffix= ".plane-proj-tiff";
public boolean usePlaneProjection= false; //
public boolean planeAsJPEG= true; // save de-warped image as JPEG (only if equirectangularFormat==0)
public String resultsDirectory= "";
public boolean removeUnusedSensorData= false;
public int exposureCorrectionMode= 2; // - 0 - none, 1 - absolute, 2 - relative
public double referenceExposure= 0.0003; // 3/10000 sec, used in absolute mode only
public double relativeExposure= 0.5; // 0.0 - use shortest (darken), 1.0 - use longest (brighten)
public String cltKernelDirectory= "";
public String cltKernelPrefix= "clt-";
public String cltSuffix= ".clt-tiff";
public boolean use_x3d_subdirs = true;
public String x3dSubdirPrefix= "";
public String x3dSubdirSuffix= "";
// CLT 3d batch parameters
@@ -133,6 +134,141 @@ public class EyesisCorrectionParameters {
public String x3dModelVersion="v01";
public String x3dDirectory="";
public CorrectionParameters getAux() {
return aux_camera;
}
public CorrectionParameters aux_camera = null; // auxiliary camera parameters
// public boolean use_aux = true; // Generate debug images if a single set is selected
public void updateAuxFromMain() { // from master to aux
if (aux_camera == null) {
aux_camera = new CorrectionParameters();
initAuxFromMain(aux_camera);
} else {
updateAuxFromMain(aux_camera);
}
}
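updateAuxFromMain() doubles as a lazy initializer: the first call creates the auxiliary parameter set and derives its defaults, later calls only re-synchronize the shared fields. A generic sketch of that create-on-first-use idiom (hypothetical names, not the Elphel types):

// Hypothetical sketch of the create-on-first-use / re-sync idiom used for aux_camera.
class LazyAuxSketch {
    LazyAuxSketch aux = null;
    String sourceDirectory = "/data/src";

    void updateAuxFromMain() {
        if (aux == null) {
            aux = new LazyAuxSketch();
            initAuxFromMain(aux);   // first call: derive aux-specific defaults
        } else {
            syncSharedFields(aux);  // later calls: keep shared settings in step
        }
    }
    void initAuxFromMain(LazyAuxSketch cp) { syncSharedFields(cp); }
    void syncSharedFields(LazyAuxSketch cp) { cp.sourceDirectory = this.sourceDirectory; }
}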
public void updateAuxFromMain(CorrectionParameters cp) { // from master to aux
cp.split = this.split;
cp.vignetting= this.vignetting;
cp.pixelDefects= this.pixelDefects;
cp.pixelDefectsThreshold= this.pixelDefectsThreshold;
cp.debayer= this.debayer;
cp.showDebayerEnergy = this.showDebayerEnergy;
cp.saveDebayerEnergy= this.saveDebayerEnergy;
cp.deconvolve= this.deconvolve;
cp.combine= this.combine;
cp.showDenoiseMask= this.showDenoiseMask;
cp.saveDenoiseMask= this.saveDenoiseMask;
cp.showChromaDenoiseMask= this.showChromaDenoiseMask;
cp.saveChromaDenoiseMask= this.saveChromaDenoiseMask;
cp.showNoiseGains= this.showNoiseGains;
cp.saveNoiseGains= this.saveNoiseGains;
cp.colorProc= this.colorProc;
cp.blueProc= this.blueProc;
cp.toRGB= this.toRGB;
cp.rotate= this.rotate;
cp.crop= this.crop;
cp.equirectangularFormat= this.equirectangularFormat;
cp.outputRangeInt= this.outputRangeInt;
cp.outputRangeFP= this.outputRangeFP;
cp.imageJTags= this.imageJTags;
cp.jpeg= this.jpeg;
cp.png= this.png;
cp.save= this.save;
cp.save16= this.save16;
cp.save32= this.save32;
cp.show= this.show;
cp.JPEG_quality= this.JPEG_quality;
cp.JPEG_scale= this.JPEG_scale;
cp.equirectangular= this.equirectangular;
cp.zcorrect= this.zcorrect;
cp.saveSettings= this.saveSettings;
cp.sourceDirectory= this.sourceDirectory;
cp.sourcePrefix= this.sourcePrefix;
cp.sourceSuffix= this.sourceSuffix;
// cp.firstSubCamera= this.firstSubCamera;
// cp.numSubCameras= this.numSubCameras;
// cp.sensorDirectory= this.sensorDirectory;
// cp.sensorPrefix= this.sensorPrefix;
// cp.sensorSuffix= this.sensorSuffix;
cp.sharpKernelDirectory= this.sharpKernelDirectory;
cp.sharpKernelPrefix= this.sharpKernelPrefix;
cp.sharpKernelSuffix= this.sharpKernelSuffix;
cp.smoothKernelDirectory= this.smoothKernelDirectory;
cp.smoothKernelPrefix= this.smoothKernelPrefix;
cp.smoothKernelSuffix= this.smoothKernelSuffix;
cp.dctKernelDirectory= this.dctKernelDirectory;
cp.dctKernelPrefix= this.dctKernelPrefix;
cp.dctSymSuffix= this.dctSymSuffix;
cp.dctAsymSuffix= this.dctAsymSuffix;
cp.equirectangularDirectory=this.equirectangularDirectory;
cp.equirectangularPrefix= this.equirectangularPrefix;
cp.equirectangularSuffix= this.equirectangularSuffix;
cp.equirectangularCut= this.equirectangularCut;
cp.planeMapPrefix= this.planeMapPrefix;
cp.planeMapSuffix= this.planeMapSuffix;
cp.usePlaneProjection= this.usePlaneProjection;
cp.planeAsJPEG= this.planeAsJPEG;
// cp.resultsDirectory= this.resultsDirectory;
cp.removeUnusedSensorData= this.removeUnusedSensorData;
if (this.sourcePaths!=null) {
cp.sourcePaths=new String[this.sourcePaths.length];
for (int i=0;i<this.sourcePaths.length;i++){
cp.sourcePaths[i] = this.sourcePaths[i];
}
}
cp.exposureCorrectionMode= this.exposureCorrectionMode;
cp.referenceExposure= this.referenceExposure;
cp.relativeExposure= this.relativeExposure;
cp.swapSubchannels01= this.swapSubchannels01;
// cp.cltKernelDirectory= this.cltKernelDirectory;
// cp.cltKernelPrefix= this.cltKernelPrefix;
// cp.cltSuffix= this.cltSuffix;
cp.x3dDirectory= this.x3dDirectory;
cp.use_x3d_subdirs= this.use_x3d_subdirs;
cp.x3dSubdirPrefix= this.x3dSubdirPrefix;
cp.x3dSubdirSuffix= this.x3dSubdirSuffix;
cp.x3dModelVersion= this.x3dModelVersion;
cp.clt_batch_apply_man= this.clt_batch_apply_man;
cp.clt_batch_extrinsic= this.clt_batch_extrinsic;
cp.clt_batch_poly= this.clt_batch_poly;
cp.clt_batch_4img= this.clt_batch_4img;
cp.clt_batch_explore= this.clt_batch_explore;
cp.clt_batch_surf= this.clt_batch_surf;
cp.clt_batch_assign= this.clt_batch_assign;
cp.clt_batch_gen3d= this.clt_batch_gen3d;
cp.clt_batch_dbg1= this.clt_batch_dbg1;
}
public void initAuxFromMain(CorrectionParameters cp) { // from master to aux
updateAuxFromMain(cp); // common parameters
// empty to prevent accidental use of the wrong kernels/sensor calibration files
cp.sensorDirectory= ""; // this.sensorDirectory;
cp.cltKernelDirectory= ""; // this.cltKernelDirectory;
cp.resultsDirectory= this.resultsDirectory+"/aux";
cp.firstSubCamera= this.firstSubCamera + this.numSubCameras;
cp.numSubCameras= this.numSubCameras;
cp.sensorPrefix= ""; // this.sensorPrefix;
cp.sensorSuffix= this.sensorSuffix;
cp.cltKernelPrefix= this.cltKernelPrefix;
cp.cltSuffix= this.cltSuffix;
}
public void auxFromExternal(CorrectionParameters ecp) { // from external to aux
this.aux_camera.sensorDirectory= ecp.sensorDirectory;
this.aux_camera.cltKernelDirectory= ecp.cltKernelDirectory;
this.aux_camera.resultsDirectory= ecp.resultsDirectory+"/aux";
this.aux_camera.firstSubCamera= ecp.firstSubCamera;
this.aux_camera.numSubCameras= ecp.numSubCameras;
this.aux_camera.sensorPrefix= ecp.sensorPrefix;
this.aux_camera.sensorSuffix= ecp.sensorSuffix;
this.aux_camera.cltKernelPrefix= ecp.cltKernelPrefix;
this.aux_camera.cltSuffix= ecp.cltSuffix;
}
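initAuxFromMain() above derives the auxiliary quad's settings from the main one: processing options are copied, calibration paths are cleared so the wrong kernels or sensor files cannot be picked up silently, and sub-camera numbering continues after the main quad. A self-contained sketch of that hand-off; class and field names are assumptions for illustration only:

// Illustrative only: mimics the firstSubCamera/numSubCameras hand-off for a dual-quad rig.
public class DualQuadNumberingSketch {
    static class CamParams {
        int firstSubCamera;
        int numSubCameras;
        String sensorDirectory;
        String resultsDirectory;
    }

    // Derive auxiliary-camera parameters from the main ones (hypothetical helper).
    static CamParams initAuxFromMain(CamParams mainCam) {
        CamParams aux = new CamParams();
        aux.firstSubCamera   = mainCam.firstSubCamera + mainCam.numSubCameras; // continue numbering after main
        aux.numSubCameras    = mainCam.numSubCameras;                          // same-size quad
        aux.sensorDirectory  = "";                                             // force explicit aux calibration
        aux.resultsDirectory = mainCam.resultsDirectory + "/aux";              // keep aux results separate
        return aux;
    }

    public static void main(String[] args) {
        CamParams mainCam = new CamParams();
        mainCam.firstSubCamera = 1;
        mainCam.numSubCameras = 4;
        mainCam.resultsDirectory = "/data/results";
        CamParams aux = initAuxFromMain(mainCam);
        // Main quad uses channels 1..4, aux quad 5..8 in the source file names.
        System.out.println("aux first channel = " + aux.firstSubCamera); // prints 5
    }
}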
public void setProperties(String prefix,Properties properties){
properties.setProperty(prefix+"split",this.split+"");
properties.setProperty(prefix+"vignetting",this.vignetting+"");
@@ -174,6 +310,7 @@ public class EyesisCorrectionParameters {
properties.setProperty(prefix+"sourcePrefix",this.sourcePrefix);
properties.setProperty(prefix+"sourceSuffix",this.sourceSuffix);
properties.setProperty(prefix+"firstSubCamera",this.firstSubCamera+"");
properties.setProperty(prefix+"numSubCameras", this.numSubCameras+"");
properties.setProperty(prefix+"sensorDirectory",this.sensorDirectory);
properties.setProperty(prefix+"sensorPrefix",this.sensorPrefix);
@@ -210,6 +347,7 @@ public class EyesisCorrectionParameters {
properties.setProperty(prefix+"sourcePath"+i,this.sourcePaths[i]);
}
}
properties.setProperty(prefix+"exposureCorrectionMode",this.exposureCorrectionMode+"");
properties.setProperty(prefix+"referenceExposure", this.referenceExposure+"");
properties.setProperty(prefix+"relativeExposure", this.relativeExposure+"");
@@ -237,6 +375,20 @@ public class EyesisCorrectionParameters {
properties.setProperty(prefix+"clt_batch_assign", this.clt_batch_assign+"");
properties.setProperty(prefix+"clt_batch_gen3d", this.clt_batch_gen3d+"");
properties.setProperty(prefix+"clt_batch_dbg1", this.clt_batch_dbg1+"");
if (aux_camera != null) { // always
updateAuxFromMain();
String aux_prefix = prefix + AUX_PREFIX;
properties.setProperty(aux_prefix+"sensorDirectory", this.aux_camera.sensorDirectory);
properties.setProperty(aux_prefix+"cltKernelDirectory", this.aux_camera.cltKernelDirectory);
properties.setProperty(aux_prefix+"resultsDirectory", this.aux_camera.resultsDirectory);
properties.setProperty(aux_prefix+"firstSubCamera", this.aux_camera.firstSubCamera+"");
properties.setProperty(aux_prefix+"numSubCameras", this.aux_camera.numSubCameras+"");
properties.setProperty(aux_prefix+"sensorPrefix", this.aux_camera.sensorPrefix);
properties.setProperty(aux_prefix+"sensorSuffix", this.aux_camera.sensorSuffix);
properties.setProperty(aux_prefix+"cltKernelPrefix", this.aux_camera.cltKernelPrefix);
properties.setProperty(aux_prefix+"cltSuffix", this.aux_camera.cltSuffix);
}
}
public void getProperties(String prefix,Properties properties){
@@ -278,7 +430,8 @@ public class EyesisCorrectionParameters {
if (properties.getProperty(prefix+"sourceDirectory")!= null) this.sourceDirectory=properties.getProperty(prefix+"sourceDirectory");
if (properties.getProperty(prefix+"sourcePrefix")!= null) this.sourcePrefix=properties.getProperty(prefix+"sourcePrefix");
if (properties.getProperty(prefix+"sourceSuffix")!= null) this.sourceSuffix=properties.getProperty(prefix+"sourceSuffix");
if (properties.getProperty(prefix+"firstSubCamera")!= null) this.firstSubCamera=Integer.parseInt(properties.getProperty(prefix+"firstSubCamera"));
if (properties.getProperty(prefix+"numSubCameras")!= null) this.numSubCameras=Integer.parseInt(properties.getProperty(prefix+"numSubCameras"));
if (properties.getProperty(prefix+"sensorDirectory")!= null) this.sensorDirectory=properties.getProperty(prefix+"sensorDirectory");
if (properties.getProperty(prefix+"sensorPrefix")!= null) this.sensorPrefix=properties.getProperty(prefix+"sensorPrefix");
if (properties.getProperty(prefix+"sensorSuffix")!= null) this.sensorSuffix=properties.getProperty(prefix+"sensorSuffix");
@@ -346,202 +499,20 @@ public class EyesisCorrectionParameters {
if (properties.getProperty(prefix+"clt_batch_assign")!= null) this.clt_batch_assign=Boolean.parseBoolean(properties.getProperty(prefix+"clt_batch_assign"));
if (properties.getProperty(prefix+"clt_batch_gen3d")!= null) this.clt_batch_gen3d=Boolean.parseBoolean(properties.getProperty(prefix+"clt_batch_gen3d"));
if (properties.getProperty(prefix+"clt_batch_dbg1")!= null) this.clt_batch_dbg1=Boolean.parseBoolean(properties.getProperty(prefix+"clt_batch_dbg1"));
}
public boolean showDialog(String title) {
GenericDialog gd = new GenericDialog(title);
gd.addCheckbox ("Splt into Bayer stack (if false will exit)", this.split);
gd.addCheckbox ("Apply vignetting/color correction to source files",this.vignetting);
gd.addCheckbox ("Replace hot/warm/cold pixels with average of neighbors",this.pixelDefects);
gd.addNumericField("Pixel difference thershold to consider it \"bad\" on 255.0 scale (0 - use all)", this.pixelDefectsThreshold, 2,6,"8.0");
String [] choices={"none","absolute","relative"};
if (this.exposureCorrectionMode<0) this.exposureCorrectionMode=0;
else if (this.exposureCorrectionMode>=choices.length) this.exposureCorrectionMode=choices.length-1;
gd.addChoice ("Exposure correction",choices, choices[this.exposureCorrectionMode]);
gd.addNumericField("Reference exposure (effective only in \"absolute\" mode)", 1000.0*this.referenceExposure, 2,6,"ms");
gd.addNumericField("Exposure scale (effective only in \"relative\" mode) 0 - darken, 1 - lighten", this.relativeExposure, 3,5,"");
gd.addCheckbox ("De-mosaic (if false will exit)", this.debayer);
gd.addCheckbox ("Show de-mosaic middle-frequency 'energy", this.showDebayerEnergy);
gd.addCheckbox ("Save de-mosaic middle-frequency 'energy", this.saveDebayerEnergy);
gd.addCheckbox ("Sharpen (convolve with calibration kernels)", this.deconvolve);
gd.addCheckbox ("Denoise (convolve with Gaussian in smooth areas)", this.combine);
gd.addCheckbox ("Show denoise mask (white - use hi-res, black - low-res)", this.showDenoiseMask);
gd.addCheckbox ("Save denoise mask (white - use hi-res, black - low-res)", this.saveDenoiseMask);
gd.addCheckbox ("Show kernel noise gains", this.showNoiseGains);
gd.addCheckbox ("Save kernel noise gains", this.saveNoiseGains);
gd.addCheckbox ("Convert colors", this.colorProc);
gd.addCheckbox ("Fix blue leak", this.blueProc);
gd.addCheckbox ("Show chroma denoise mask (white - use hi-res, black - low-res)", this.showChromaDenoiseMask);
gd.addCheckbox ("Save chroma denoise mask (white - use hi-res, black - low-res)", this.saveChromaDenoiseMask);
gd.addCheckbox ("Rotate result image", this.rotate);
gd.addCheckbox ("Crop result image to the original size", this.crop);
String [] equirectangularFormatChoices={"RGBA 8-bit","RGBA 16-bit","RGBA 32-bit integer","RGBA 32-bit float","ImageJ stack"};
int [] equirectangularFormats={0,1,2,3,4};
int equirectangularFormatIndex=0;
for ( int i=0;i<equirectangularFormats.length;i++) if (equirectangularFormats[i]==this.equirectangularFormat){
equirectangularFormatIndex=i;
break;
}
gd.addChoice ("Equirectangular output format",equirectangularFormatChoices, equirectangularFormatChoices[equirectangularFormatIndex]);
gd.addNumericField("Map 1.0 intensity to this fraction of the full range 8/16/32-bit integer mode output", 100*this.outputRangeInt, 2,6,"%");
gd.addNumericField("Map 1.0 intensity to this value in 32-bit floating point output mode", this.outputRangeFP, 2,6,"");
gd.addCheckbox ("Encode ImageJ specific Info metadata to the output file TIFF header", this.imageJTags);
gd.addCheckbox ("Convert to RGB48", this.toRGB);
gd.addCheckbox ("Convert to 8 bit RGB (and save JPEG if save is enabled)", this.jpeg);
gd.addCheckbox ("Use PNG instead of TIFF for 32 bit (8 per color) RGBA", this.png);
gd.addCheckbox ("Save the result to file system", this.save);
gd.addCheckbox ("Save 16-bit tiff if the result is 8 bit", this.save16);
gd.addCheckbox ("Save 32-bit tiff if the result is 8 or 16 bit", this.save32);
gd.addCheckbox ("Show the result image", this.show);
gd.addNumericField("JPEG quality (%)", this.JPEG_quality,0);
gd.addNumericField("JPEG scale (%)", 100* this.JPEG_scale,0);
gd.addCheckbox ("Warp results to equirectangular", this.equirectangular);
gd.addCheckbox ("Calculate distances in overlapping areas", this.zcorrect);
gd.addCheckbox ("Save current settings with results", this.saveSettings);
gd.addStringField ("Source files directory", this.sourceDirectory, 60);
gd.addCheckbox ("Select source directory", false);
gd.addStringField ("Sensor calibration directory", this.sensorDirectory, 60);
gd.addCheckbox ("Select sensor calibration directory", false);
gd.addStringField ("Aberration kernels (sharp) directory", this.sharpKernelDirectory, 60);
gd.addCheckbox ("Select aberration kernels (sharp) directory", false);
gd.addStringField ("Aberration kernels (smooth) directory", this.smoothKernelDirectory, 60);
gd.addCheckbox ("Select aberration kernels (smooth) directory", false);
gd.addStringField ("Aberration kernels for DCT directory", this.dctKernelDirectory, 60);
gd.addCheckbox ("Select aberration kernels for DCT directory", false);
gd.addStringField ("Aberration kernels for CLT directory", this.cltKernelDirectory, 60);
gd.addCheckbox ("Select aberration kernels for CLT directory", false);
gd.addStringField ("x3d model version", this.x3dModelVersion, 20); // 10a
gd.addStringField ("x3d output directory", this.x3dDirectory, 60);
gd.addCheckbox ("Select x3d output directory", false);
gd.addCheckbox ("Use individual subdirectory for each 3d model (timestamp as name)", this.use_x3d_subdirs);
gd.addStringField ("x3d subdirectory prefix", this.x3dSubdirPrefix, 10);
gd.addStringField ("x3d subdirectory suffix", this.x3dSubdirSuffix, 10);
gd.addStringField("Equirectangular maps directory (may be empty)", this.equirectangularDirectory, 60);
gd.addCheckbox("Select equirectangular maps directory", false);
gd.addStringField("Results directory", this.resultsDirectory, 60);
gd.addCheckbox("Select results directory", false);
gd.addStringField("Source files prefix", this.sourcePrefix, 60);
gd.addStringField("Source files suffix", this.sourceSuffix, 60);
gd.addNumericField("First subcamera (in the source filename)", this.firstSubCamera, 0);
gd.addStringField("Sensor files prefix", this.sensorPrefix, 40);
gd.addStringField("Sensor files suffix", this.sensorSuffix, 40);
gd.addStringField("Kernel files (sharp) prefix", this.sharpKernelPrefix, 40);
gd.addStringField("Kernel files (sharp) suffix", this.sharpKernelSuffix, 40);
gd.addStringField("Kernel files (smooth) prefix", this.smoothKernelPrefix, 40);
gd.addStringField("Kernel files (smooth) suffix", this.smoothKernelSuffix, 40);
gd.addStringField("DCT kernel files prefix", this.dctKernelPrefix, 40);
gd.addStringField("DCT symmetical kernel files", this.dctSymSuffix, 40);
gd.addStringField("DCT asymmetrical kernel files suffix", this.dctAsymSuffix, 40);
gd.addStringField("CLT kernel files prefix", this.cltKernelPrefix, 40);
gd.addStringField("CLT kernel files suffix", this.cltSuffix, 40);
gd.addStringField("Equirectangular maps prefix", this.equirectangularPrefix, 40);
gd.addStringField("Equirectangular maps suffix", this.equirectangularSuffix, 40);
gd.addCheckbox("Cut rolling-over equirectangular images in two", this.equirectangularCut);
gd.addStringField("Plane projection map prefix", this.planeMapPrefix, 40);
gd.addStringField("Plane projection map suffix", this.planeMapSuffix, 40);
gd.addCheckbox("Use projection to a common plane instead of the equirectangular", this.usePlaneProjection);
gd.addCheckbox("Save de-warped images as JPEG instead of TIFF", this.planeAsJPEG);
// gd.addStringField("Suffix for the second part of rolled-over equirectangular images", this.equirectangularSuffixA, 40);
gd.addCheckbox ("Remove unused sensor data", this.removeUnusedSensorData);
gd.addCheckbox ("Swap top and equator images", this.swapSubchannels01);
WindowTools.addScrollBars(gd);
gd.showDialog();
if (gd.wasCanceled()) return false;
this.split= gd.getNextBoolean();
this.vignetting= gd.getNextBoolean();
this.pixelDefects= gd.getNextBoolean();
this.pixelDefectsThreshold= gd.getNextNumber();
this.exposureCorrectionMode= gd.getNextChoiceIndex();
this.referenceExposure=0.001*gd.getNextNumber();
this.relativeExposure= gd.getNextNumber();
this.debayer= gd.getNextBoolean();
this.showDebayerEnergy= gd.getNextBoolean();
this.saveDebayerEnergy= gd.getNextBoolean();
this.deconvolve= gd.getNextBoolean();
this.combine= gd.getNextBoolean();
this.showDenoiseMask= gd.getNextBoolean();
this.saveDenoiseMask= gd.getNextBoolean();
this.showNoiseGains= gd.getNextBoolean();
this.saveNoiseGains= gd.getNextBoolean();
this.colorProc= gd.getNextBoolean();
this.blueProc= gd.getNextBoolean();
this.showChromaDenoiseMask= gd.getNextBoolean();
this.saveChromaDenoiseMask= gd.getNextBoolean();
this.rotate= gd.getNextBoolean();
this.crop= gd.getNextBoolean();
this.equirectangularFormat= equirectangularFormats[gd.getNextChoiceIndex()];
this.outputRangeInt=0.01*gd.getNextNumber();
this.outputRangeFP= gd.getNextNumber();
this.imageJTags= gd.getNextBoolean();
this.toRGB= gd.getNextBoolean();
this.jpeg= gd.getNextBoolean();
this.png= gd.getNextBoolean();
this.save= gd.getNextBoolean();
this.save16= gd.getNextBoolean();
this.save32= gd.getNextBoolean();
this.show= gd.getNextBoolean();
this.JPEG_quality=(int) gd.getNextNumber();
this.JPEG_scale= 0.01*gd.getNextNumber();
this.equirectangular= gd.getNextBoolean();
this.zcorrect= gd.getNextBoolean();
this.saveSettings= gd.getNextBoolean();
this.sourceDirectory= gd.getNextString(); if (gd.getNextBoolean()) selectSourceDirectory(false, false);
this.sensorDirectory= gd.getNextString(); if (gd.getNextBoolean()) selectSensorDirectory(false, false);
this.sharpKernelDirectory= gd.getNextString(); if (gd.getNextBoolean()) selectSharpKernelDirectory(false, false);
this.smoothKernelDirectory= gd.getNextString(); if (gd.getNextBoolean()) selectSmoothKernelDirectory(false, true);
this.dctKernelDirectory= gd.getNextString(); if (gd.getNextBoolean()) selectDCTKernelDirectory(false, true);
this.cltKernelDirectory= gd.getNextString(); if (gd.getNextBoolean()) selectCLTKernelDirectory(false, true);
this.x3dModelVersion= gd.getNextString(); // 10a
this.x3dDirectory= gd.getNextString(); if (gd.getNextBoolean()) selectX3dDirectory(false, true);
this.use_x3d_subdirs= gd.getNextBoolean();
this.x3dSubdirPrefix= gd.getNextString();
this.x3dSubdirSuffix= gd.getNextString();
this.equirectangularDirectory= gd.getNextString(); if (gd.getNextBoolean()) selectEquirectangularDirectory(false, false);
this.resultsDirectory= gd.getNextString(); if (gd.getNextBoolean()) selectResultsDirectory(false, true);
this.sourcePrefix= gd.getNextString();
this.sourceSuffix= gd.getNextString();
this.firstSubCamera= (int) gd.getNextNumber();
this.sensorPrefix= gd.getNextString();
this.sensorSuffix= gd.getNextString();
this.sharpKernelPrefix= gd.getNextString();
this.sharpKernelSuffix= gd.getNextString();
this.smoothKernelPrefix= gd.getNextString();
this.smoothKernelSuffix= gd.getNextString();
this.dctKernelPrefix= gd.getNextString();
this.dctSymSuffix= gd.getNextString();
this.dctAsymSuffix= gd.getNextString();
this.cltKernelPrefix= gd.getNextString();
this.cltSuffix= gd.getNextString();
this.equirectangularPrefix= gd.getNextString();
this.equirectangularSuffix= gd.getNextString();
this.equirectangularCut= gd.getNextBoolean();
this.planeMapPrefix= gd.getNextString();
this.planeMapSuffix= gd.getNextString();
this.usePlaneProjection= gd.getNextBoolean();
this.planeAsJPEG= gd.getNextBoolean();
// this.equirectangularSuffixA= gd.getNextString();
this.removeUnusedSensorData= gd.getNextBoolean();
this.swapSubchannels01= gd.getNextBoolean();
return true;
}
// copy common parameters to the auxiliary camera ones
updateAuxFromMain();
String aux_prefix = prefix + AUX_PREFIX;
if (properties.getProperty(aux_prefix+"sensorDirectory")!= null) this.aux_camera.sensorDirectory=properties.getProperty(aux_prefix+"sensorDirectory");
if (properties.getProperty(aux_prefix+"cltKernelDirectory")!= null) this.aux_camera.cltKernelDirectory=properties.getProperty(aux_prefix+"cltKernelDirectory");
if (properties.getProperty(aux_prefix+"resultsDirectory")!= null) this.aux_camera.resultsDirectory=properties.getProperty(aux_prefix+"resultsDirectory");
if (properties.getProperty(aux_prefix+"firstSubCamera")!= null) this.aux_camera.firstSubCamera=Integer.parseInt(properties.getProperty(aux_prefix+"firstSubCamera"));
if (properties.getProperty(aux_prefix+"numSubCameras")!= null) this.aux_camera.numSubCameras=Integer.parseInt(properties.getProperty(aux_prefix+"numSubCameras"));
if (properties.getProperty(aux_prefix+"sensorPrefix")!= null) this.aux_camera.sensorPrefix=properties.getProperty(aux_prefix+"sensorPrefix");
if (properties.getProperty(aux_prefix+"sensorSuffix")!= null) this.aux_camera.sensorSuffix=properties.getProperty(aux_prefix+"sensorSuffix");
if (properties.getProperty(aux_prefix+"cltKernelPrefix")!= null) this.aux_camera.cltKernelPrefix=properties.getProperty(aux_prefix+"cltKernelPrefix");
if (properties.getProperty(aux_prefix+"cltSuffix")!= null) this.aux_camera.cltSuffix=properties.getProperty(aux_prefix+"cltSuffix");
}
public boolean showJDialog(String title) {
// GenericDialog gd = new GenericDialog(title);
@@ -632,7 +603,8 @@ public class EyesisCorrectionParameters {
gd.addTab("Prefix/suffix","Prefixes and suffixes for various file types");
gd.addStringField("Source files prefix", this.sourcePrefix, 60);
gd.addStringField("Source files suffix", this.sourceSuffix, 60);
gd.addNumericField("First subcamera (in the source filenames)", this.firstSubCamera, 0);
gd.addNumericField("Number of subcameras in this camera (in the source filenames)", this.numSubCameras, 0);
gd.addStringField("Sensor files prefix", this.sensorPrefix, 40);
gd.addStringField("Sensor files suffix", this.sensorSuffix, 40);
@@ -721,6 +693,7 @@ public class EyesisCorrectionParameters {
this.sourcePrefix= gd.getNextString();
this.sourceSuffix= gd.getNextString();
this.firstSubCamera= (int) gd.getNextNumber();
this.numSubCameras= (int) gd.getNextNumber();
this.sensorPrefix= gd.getNextString();
this.sensorSuffix= gd.getNextString();
this.sharpKernelPrefix= gd.getNextString();
@@ -746,20 +719,18 @@ public class EyesisCorrectionParameters {
}
public boolean showCLTBatchDialog(String title,
CLTParameters clt_parameters) {
GenericJTabbedDialog gd = new GenericJTabbedDialog(title,1000,1000);
updateAuxFromMain();
gd.addTab ("File paths", "Select files and directories paths (common to main and optional auxiliary)");
gd.addMessage ("============ Common to the main and optional auxiliary camera============");
gd.addTab ("File paths", "Select files and directories pahs");
gd.addCheckbox ("Save current settings with results", this.saveSettings); // 1
gd.addStringField ("Source files directory", this.sourceDirectory, 60); // 2
gd.addCheckbox ("Select source directory", false); // 3
gd.addStringField ("Sensor calibration directory", this.sensorDirectory, 60); // 4
gd.addCheckbox ("Select sensor calibration directory", false); // 5
gd.addStringField ("Aberration kernels for CLT directory", this.cltKernelDirectory, 60); // 6
gd.addCheckbox ("Select aberration kernels for CLT directory", false); // 7
gd.addStringField ("x3d model version", this.x3dModelVersion, 60); // 10a
gd.addStringField ("x3d output directory", this.x3dDirectory, 60); // 8
@@ -767,30 +738,53 @@ public class EyesisCorrectionParameters {
gd.addCheckbox ("Use individual subdirectory for each 3d model (timestamp as name)", this.use_x3d_subdirs); //10
gd.addStringField ("Source files prefix", this.sourcePrefix, 60); // 13
gd.addStringField ("Source files suffix", this.sourceSuffix, 60); // 14
gd.addMessage ("============ Main camera============");
gd.addStringField ("Sensor calibration directory", this.sensorDirectory, 60); // 4
gd.addCheckbox ("Select sensor calibration directory", false); // 5
gd.addStringField ("Aberration kernels for CLT directory", this.cltKernelDirectory, 60); // 6
gd.addCheckbox ("Select aberration kernels for CLT directory", false); // 7
gd.addStringField ("Results directory", this.resultsDirectory, 60); // 11
gd.addCheckbox ("Select results directory", false); // 12
gd.addStringField ("Source files prefix", this.sourcePrefix, 60); // 13
gd.addStringField ("Source files suffix", this.sourceSuffix, 60); // 14
gd.addNumericField("First subcamera (in the source filename)", this.firstSubCamera, 0); // 15
gd.addNumericField("Number of subcameras in this camera (in the source filenames)", this.numSubCameras, 0); // 16
gd.addStringField ("Sensor files prefix", this.sensorPrefix, 40); // 17
gd.addStringField ("Sensor files suffix", this.sensorSuffix, 40); // 18
gd.addStringField ("CLT kernel files prefix", this.cltKernelPrefix, 40); // 19
gd.addStringField ("CLT kernel files suffix", this.cltSuffix, 40); // 20
gd.addMessage ("============ Auxiliary camera============");
gd.addStringField ("Aux sensor calibration directory", this.aux_camera.sensorDirectory, 60); // 4b
gd.addCheckbox ("Select aux sensor calibration directory", false); // 5b
gd.addStringField ("Aberration kernels for aux CLT directory", this.aux_camera.cltKernelDirectory, 60); // 6b
gd.addCheckbox ("Select aberration kernels for aux CLT directory", false); // 7b
gd.addStringField ("Aux results directory", this.aux_camera.resultsDirectory, 60); // 11b
gd.addCheckbox ("Select aux results directory", false); // 12b
gd.addNumericField("First aux subcamera (in the source filename)", this.aux_camera.firstSubCamera, 0); // 15b
gd.addNumericField("Number of aux subcameras in this camera (in the source filenames)", this.aux_camera.numSubCameras, 0); // 16b
gd.addStringField ("Aux sensor files prefix", this.aux_camera.sensorPrefix, 40); // 17b
gd.addStringField ("Aux sensor files suffix", this.aux_camera.sensorSuffix, 40); // 18b
gd.addStringField ("Aux CLT kernel files prefix", this.aux_camera.cltKernelPrefix, 40); // 19b
gd.addStringField ("Aux CLT kernel files suffix", this.aux_camera.cltSuffix, 40); // 20b
gd.addStringField("Sensor files prefix", this.sensorPrefix, 40); // 16
gd.addStringField("Sensor files suffix", this.sensorSuffix, 40); // 17
gd.addStringField("CLT kernel files prefix", this.cltKernelPrefix, 40); // 18
gd.addStringField("CLT kernel files suffix", this.cltSuffix, 40); // 19
gd.addTab ("Batch", "Select Batch parameters");
gd.addCheckbox ("Apply (and disable) manual pixel shift", this.clt_batch_apply_man); // 21
gd.addCheckbox ("Calibrate extrinsic parameters for each set", this.clt_batch_extrinsic); // 22
gd.addCheckbox ("Calculate fine polynomial correction for each set", this.clt_batch_poly); // 23
gd.addCheckbox ("Create a set of 4 images, usually for disparity = 0", this.clt_batch_4img); // 24
gd.addCheckbox ("1-st step of 3d reconstruction - explore disparities for each tile", this.clt_batch_explore); // 25
gd.addCheckbox ("Create super-tile 2.5d surfaces", this.clt_batch_surf); // 26
gd.addCheckbox ("Assign tiles to surfaces", this.clt_batch_assign); // 27
gd.addCheckbox ("Generate 3d output: x3d and/or obj+mtl", this.clt_batch_gen3d); // 28
gd.addCheckbox ("Generate debug images if a single set is selected", this.clt_batch_dbg1); // 29
if (clt_parameters != null) {
// gd.addMessage ("============ selected CLT parameters ============");
gd.addTab ("CLT", "Modify selected CLT parameters");
@@ -810,29 +804,43 @@ public class EyesisCorrectionParameters {
this.saveSettings= gd.getNextBoolean(); // 1
this.sourceDirectory= gd.getNextString(); if (gd.getNextBoolean()) selectSourceDirectory(false, false); // 3
this.sensorDirectory= gd.getNextString(); if (gd.getNextBoolean()) selectSensorDirectory(false, false); // 5
this.cltKernelDirectory= gd.getNextString(); if (gd.getNextBoolean()) selectCLTKernelDirectory(false, true); // 7
this.x3dModelVersion= gd.getNextString(); // 10a
this.x3dDirectory= gd.getNextString(); if (gd.getNextBoolean()) selectX3dDirectory(false, true); // 9
this.use_x3d_subdirs= gd.getNextBoolean(); // 10
this.resultsDirectory= gd.getNextString(); if (gd.getNextBoolean()) selectResultsDirectory(false, true); // 12
this.sourcePrefix= gd.getNextString(); // 13
this.sourceSuffix= gd.getNextString(); // 14
// main camera
this.sensorDirectory= gd.getNextString(); if (gd.getNextBoolean()) selectSensorDirectory(false, false); // 5
this.cltKernelDirectory= gd.getNextString(); if (gd.getNextBoolean()) selectCLTKernelDirectory(false, true); // 7
this.resultsDirectory= gd.getNextString(); if (gd.getNextBoolean()) selectResultsDirectory(false, true); // 12
this.firstSubCamera= (int) gd.getNextNumber(); // 15
this.numSubCameras= (int) gd.getNextNumber(); // 16
this.sensorPrefix= gd.getNextString(); // 17
this.sensorSuffix= gd.getNextString(); // 18
this.cltKernelPrefix= gd.getNextString(); // 19
this.cltSuffix= gd.getNextString(); // 20
// aux camera
this.aux_camera.sensorDirectory= gd.getNextString(); if (gd.getNextBoolean()) aux_camera.selectSensorDirectory(false, false); // 5b
this.aux_camera.cltKernelDirectory= gd.getNextString(); if (gd.getNextBoolean()) aux_camera.selectCLTKernelDirectory(false, true); // 7b
this.aux_camera.resultsDirectory= gd.getNextString(); if (gd.getNextBoolean()) aux_camera.selectResultsDirectory(false, true); // 12b
this.aux_camera.firstSubCamera= (int) gd.getNextNumber(); // 15b
this.aux_camera.numSubCameras= (int) gd.getNextNumber(); // 16b
this.aux_camera.sensorPrefix= gd.getNextString(); // 17b
this.aux_camera.sensorSuffix= gd.getNextString(); // 18b
this.aux_camera.cltKernelPrefix= gd.getNextString(); // 19b
this.aux_camera.cltSuffix= gd.getNextString(); // 20b
this.clt_batch_apply_man= gd.getNextBoolean(); // 21
this.clt_batch_extrinsic= gd.getNextBoolean(); // 22
this.clt_batch_poly= gd.getNextBoolean(); // 23
this.clt_batch_4img= gd.getNextBoolean(); // 24
this.clt_batch_explore= gd.getNextBoolean(); // 25
this.clt_batch_surf= gd.getNextBoolean(); // 26
this.clt_batch_assign= gd.getNextBoolean(); // 27
this.clt_batch_gen3d= gd.getNextBoolean(); // 28
this.clt_batch_dbg1= gd.getNextBoolean(); // 29
if (clt_parameters != null) {
clt_parameters.grow_disp_max = gd.getNextNumber();
clt_parameters.gain_equalize = gd.getNextBoolean();
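The numbered comments (// 15, // 16b, ...) track a constraint of ImageJ-style dialogs: values are read back with getNextString()/getNextNumber()/getNextBoolean() in exactly the order the fields were added, so inserting the aux fields forces every later read to shift. A minimal sketch of the paired add/read order using ImageJ's plain GenericDialog (standing in here for GenericJTabbedDialog); labels and defaults are illustrative:

import ij.gui.GenericDialog;

// Minimal sketch: read-back order must mirror the order in which fields were added.
public class DialogOrderSketch {
    public static void main(String[] args) {
        GenericDialog gd = new GenericDialog("Order demo");
        gd.addNumericField("First subcamera",      1, 0);         // read back 1st
        gd.addNumericField("Number of subcameras", 4, 0);         // read back 2nd (newly inserted field)
        gd.addStringField ("Sensor files prefix", "sensor-", 40); // read back 3rd
        gd.showDialog();
        if (gd.wasCanceled()) return;
        int    firstSubCamera = (int) gd.getNextNumber(); // must match the add order above
        int    numSubCameras  = (int) gd.getNextNumber();
        String sensorPrefix   = gd.getNextString();
        System.out.println(firstSubCamera + " " + numSubCameras + " " + sensorPrefix);
    }
}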
@@ -1723,14 +1731,6 @@ public class EyesisCorrectionParameters {
public double alpha_min = 0.0;
public double alpha_max = 1.0;
/* public RGBParameters(double r_min, double g_min, double b_min, double r_max, double g_max, double b_max) {
this.r_min = r_min;
this.g_min = g_min;
this.b_min = b_min;
this.r_max = r_max;
this.g_max = g_max;
this.b_max = b_max;
} */
public RGBParameters(double r_min, double g_min, double b_min, double r_max, double g_max, double b_max, double alpha_min, double alpha_max) {
this.r_min = r_min;
this.g_min = g_min;
...
@@ -3,12 +3,12 @@
** EyesisCorrections.java
**
** Aberration correction for Eyesis4pi
**
**
** Copyright (C) 2012 Elphel, Inc.
**
** -----------------------------------------------------------------------------**
**
** EyesisCorrections.java is free software: you can redistribute it and/or modify
** it under the terms of the GNU General Public License as published by
** the Free Software Foundation, either version 3 of the License, or
@@ -25,6 +25,11 @@
**
*/
import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
import javax.swing.SwingUtilities;
import ij.CompositeImage;
import ij.IJ;
import ij.ImagePlus;
@@ -35,12 +40,6 @@ import ij.io.FileSaver;
import ij.process.ColorProcessor;
import ij.process.FloatProcessor;
import ij.process.ImageProcessor;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
import javax.swing.SwingUtilities;
import loci.common.services.DependencyException;
import loci.common.services.ServiceException;
import loci.formats.FormatException;
@@ -58,7 +57,7 @@ public class EyesisCorrections {
public int [][][] defectsXY=null; // per each channel: pixel defects coordinates list (starting with worst)
public double [][] defectsDiff=null; // per each channel: pixel defects value (diff from average of neighbors), matching defectsXY
public int [][] channelWidthHeight=null;
public ImagePlus [] imageNoiseGains=null;
public String [] sharpKernelPaths=null;
public String [] smoothKernelPaths=null;
@@ -66,12 +65,12 @@ public class EyesisCorrections {
public String [] stackColorNames= {"Red","Green","Blue"};
public int psfSubpixelShouldBe4=4; // sub-pixel decimation
public long startTime=0;
// public boolean BUG_subchannel=true; // top channel - 1, middle - 0, bottom - 2 (should be -0-1-2)
// public boolean BUG_subchannel=false; // top channel - 1, middle - 0, bottom - 2 (should be -0-1-2)
public EyesisCorrections (
AtomicInteger stopRequested,
EyesisCorrectionParameters.CorrectionParameters correctionsParameters
@@ -82,16 +81,45 @@ public class EyesisCorrections {
public void setDebug(int debugLevel){
this.debugLevel=debugLevel;
}
public int getNumChannels(){return (this.usedChannels!=null)?this.usedChannels.length:0;}
// TODO: preserve some data when re-running with new source files
// FIXME: Make forgiving alien files
public void initSensorFiles(int debugLevel){
initSensorFiles(debugLevel, false);
}
public void initSensorFiles(int debugLevel, boolean missing_ok){
this.sharpKernelPaths=null; this.sharpKernelPaths=null;
this.smoothKernelPaths=null; this.smoothKernelPaths=null;
String [] sensorPaths=correctionsParameters.selectSensorFiles(this.debugLevel);
this.pixelMapping=new PixelMapping(sensorPaths,debugLevel);
this.usedChannels= usedChannels(correctionsParameters.getSourcePaths(),missing_ok);
// TODO: Combine with additional channel map to be able to select single image (of all 3)
if (correctionsParameters.removeUnusedSensorData){
for (int nChn=0;nChn< this.usedChannels.length; nChn++) if (!this.usedChannels[nChn]) this.pixelMapping.removeChannel(nChn);
}
int numUsedChannels=0;
for (int nChn=0;nChn< this.usedChannels.length; nChn++) if (this.usedChannels[nChn]) numUsedChannels++;
if (this.debugLevel>0) {
String sChannels="";
for (int nChn=0;nChn< this.usedChannels.length; nChn++) if (this.usedChannels[nChn]) sChannels+=" "+nChn;
System.out.println ("Number of used channels: "+numUsedChannels+" ("+sChannels+" )");
}
createChannelVignetting();
if ((this.debugLevel>101) && (correctionsParameters.sourcePaths!=null) && (correctionsParameters.sourcePaths.length>0)) {
testFF(correctionsParameters.sourcePaths[0]);
}
}
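The original single-argument initSensorFiles() is kept as a thin wrapper, so existing callers keep their old behavior while the dual-quad path can pass missing_ok=true to tolerate source files that have no matching sensor calibration. A generic sketch of that backwards-compatible overload pattern (hypothetical class, not the Elphel code):

// Hypothetical sketch of the delegating-overload pattern used for initSensorFiles().
public class SensorInitSketch {
    public void initSensorFiles(int debugLevel) {
        initSensorFiles(debugLevel, false); // old behavior: missing calibration is an error
    }

    public void initSensorFiles(int debugLevel, boolean missingOk) {
        boolean calibrationFound = false;   // placeholder for the real file lookup
        if (!calibrationFound && !missingOk) {
            throw new IllegalStateException("Sensor calibration files not found");
        }
        if (debugLevel > 0) {
            System.out.println("missingOk=" + missingOk + ", continuing without calibration");
        }
    }

    public static void main(String[] args) {
        new SensorInitSketch().initSensorFiles(1, true); // tolerant mode used by the aux path
    }
}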
public void initSensorFilesAux(int debugLevel){
// this.sharpKernelPaths=null;
// this.smoothKernelPaths=null;
String [] sensorPaths=correctionsParameters.selectSensorFiles(this.debugLevel);
this.pixelMapping=new PixelMapping(sensorPaths,debugLevel);
this.usedChannels= usedChannels(correctionsParameters.getSourcePaths());
// TODO: Combine with additional channel map to be able to select single image (of all 3)
if (correctionsParameters.removeUnusedSensorData){
for (int nChn=0;nChn< this.usedChannels.length; nChn++) if (!this.usedChannels[nChn]) this.pixelMapping.removeChannel(nChn);
}
@@ -108,8 +136,8 @@ public class EyesisCorrections {
if ((this.debugLevel>101) && (correctionsParameters.sourcePaths!=null) && (correctionsParameters.sourcePaths.length>0)) {
testFF(correctionsParameters.sourcePaths[0]);
}
}
public double [] calcReferenceExposures(int debugLevel){
String [] paths=this.correctionsParameters.getSourcePaths();
double [] exposures=new double [paths.length];
@@ -161,20 +189,20 @@ public class EyesisCorrections {
int j=firstImageIndex[nFile];
if (Double.isNaN(minMaxExposure[j][0]) || (minMaxExposure[j][0]>exposures[nFile])) minMaxExposure[j][0]=exposures[nFile];
if (Double.isNaN(minMaxExposure[j][1]) || (minMaxExposure[j][1]<exposures[nFile])) minMaxExposure[j][1]=exposures[nFile];
}
for (int nFile=0;nFile<paths.length;nFile++) if (!Double.isNaN(exposures[nFile])){
int j=firstImageIndex[nFile];
exposures[nFile]=(1.0-this.correctionsParameters.relativeExposure)*minMaxExposure[j][0]+
this.correctionsParameters.relativeExposure*minMaxExposure[j][1];
}
}
// apply modes
return exposures;
}
public void rebuildEquirectangularMaps(
EyesisCorrectionParameters.EquirectangularParameters equirectangularParameters,
int threadsMax, // maximal number of threads to launch
boolean updateStatus,
int debugLevel){
this.sharpKernelPaths=null;
@@ -221,7 +249,7 @@ public class EyesisCorrections {
for (int i=0;i<channelMask.length;i++) if (channelMask[i]) channelList[iChannel++]=i;
String sChannels="";
for (int i=0;i<channelList.length;i++) sChannels+=" "+channelList[i];
for (int i=0;i<channelList.length;i++) {
int channel=channelList[i];
if (!pixelMapping.isChannelAvailable(channel)){
@@ -246,7 +274,7 @@ public class EyesisCorrections {
pixelMapping.loadChannelEquirectangularMap(
channel,
path);
if (!this.pixelMapping.isEquirectangularMapAvailable(channel)){
String msg="Failed to load equirectangular map for channel "+channel;
System.out.println("Error "+msg);
@@ -255,8 +283,8 @@ public class EyesisCorrections {
}
}
}
String title="Projection_plane_map";
ImagePlus imp_pixelmap= pixelMapping.getPlaneToSensorsMap( // need to re-load equirectangular maps?
@@ -292,7 +320,7 @@ public class EyesisCorrections {
} else {
System.out.println("Failed to create pixel map for sensors "+sChannels);
}
}
}
@@ -300,7 +328,7 @@ public class EyesisCorrections {
public boolean updateImageNoiseGains( public boolean updateImageNoiseGains(
EyesisCorrectionParameters.NonlinParameters nonlinParameters, EyesisCorrectionParameters.NonlinParameters nonlinParameters,
int fftSize, // 128 - fft size, kernel size should be size/2 int fftSize, // 128 - fft size, kernel size should be size/2
int threadsMax, // maximal number of threads to launch int threadsMax, // maximal number of threads to launch
boolean updateStatus, boolean updateStatus,
int globalDebugLevel){ int globalDebugLevel){
boolean removeUnused=this.correctionsParameters.removeUnusedSensorData; boolean removeUnused=this.correctionsParameters.removeUnusedSensorData;
...@@ -329,8 +357,8 @@ public class EyesisCorrections { ...@@ -329,8 +357,8 @@ public class EyesisCorrections {
if (this.usedChannels[chn] && (this.sharpKernelPaths[chn]!=null) && (!nonlinParameters.useDiffNoiseGains ||(this.smoothKernelPaths[chn]!=null))){ if (this.usedChannels[chn] && (this.sharpKernelPaths[chn]!=null) && (!nonlinParameters.useDiffNoiseGains ||(this.smoothKernelPaths[chn]!=null))){
if ( if (
(this.imageNoiseGains[chn]==null) || (this.imageNoiseGains[chn]==null) ||
(!this.sharpKernelPaths[chn].equals((String) this.imageNoiseGains[chn].getProperty("sharpKernelPath"))) || (!this.sharpKernelPaths[chn].equals(this.imageNoiseGains[chn].getProperty("sharpKernelPath"))) ||
(!this.smoothKernelPaths[chn].equals((String) this.imageNoiseGains[chn].getProperty("smoothKernelPath")))){ (!this.smoothKernelPaths[chn].equals(this.imageNoiseGains[chn].getProperty("smoothKernelPath")))){
ImagePlus imp_kernel_sharp=new ImagePlus(this.sharpKernelPaths[chn]); ImagePlus imp_kernel_sharp=new ImagePlus(this.sharpKernelPaths[chn]);
if (imp_kernel_sharp.getStackSize()<3) { if (imp_kernel_sharp.getStackSize()<3) {
...@@ -339,7 +367,7 @@ public class EyesisCorrections { ...@@ -339,7 +367,7 @@ public class EyesisCorrections {
continue; continue;
} }
ImageStack kernel_sharp_stack= imp_kernel_sharp.getStack(); ImageStack kernel_sharp_stack= imp_kernel_sharp.getStack();
ImageStack kernel_smooth_stack=null; ImageStack kernel_smooth_stack=null;
if (nonlinParameters.useDiffNoiseGains) { if (nonlinParameters.useDiffNoiseGains) {
ImagePlus imp_kernel_smooth=new ImagePlus(this.smoothKernelPaths[chn]); ImagePlus imp_kernel_smooth=new ImagePlus(this.smoothKernelPaths[chn]);
if (imp_kernel_smooth.getStackSize()<3) { if (imp_kernel_smooth.getStackSize()<3) {
...@@ -355,11 +383,11 @@ public class EyesisCorrections { ...@@ -355,11 +383,11 @@ public class EyesisCorrections {
kernel_smooth_stack, //final ImageStack kernelStack2, // second stack with 3 colors/slices convolution kernels (or null) kernel_smooth_stack, //final ImageStack kernelStack2, // second stack with 3 colors/slices convolution kernels (or null)
fftSize, //size, // 128 - fft size, kernel size should be size/2 fftSize, //size, // 128 - fft size, kernel size should be size/2
nonlinParameters.blurSigma, nonlinParameters.blurSigma,
threadsMax, // maximal number of threads to launch threadsMax, // maximal number of threads to launch
updateStatus, updateStatus,
globalDebugLevel); globalDebugLevel);
kernel_sharp_stack= null; // TODO: - maybe keep one set to speed-up single-channel processing? kernel_sharp_stack= null; // TODO: - maybe keep one set to speed-up single-channel processing?
kernel_smooth_stack=null; kernel_smooth_stack=null;
Runtime.getRuntime().gc(); Runtime.getRuntime().gc();
String title="noiseGains_"+(nonlinParameters.useDiffNoiseGains?"diff_":"")+String.format("%02d",chn); String title="noiseGains_"+(nonlinParameters.useDiffNoiseGains?"diff_":"")+String.format("%02d",chn);
imageNoiseGains[chn]= new ImagePlus(title, kernelsNoise); imageNoiseGains[chn]= new ImagePlus(title, kernelsNoise);
...@@ -371,7 +399,7 @@ public class EyesisCorrections { ...@@ -371,7 +399,7 @@ public class EyesisCorrections {
this.correctionsParameters.saveNoiseGains, this.correctionsParameters.saveNoiseGains,
this.correctionsParameters.showNoiseGains this.correctionsParameters.showNoiseGains
); );
} }
} }
} else { } else {
if (removeUnused) this.imageNoiseGains[chn]=null; if (removeUnused) this.imageNoiseGains[chn]=null;
...@@ -380,17 +408,17 @@ public class EyesisCorrections { ...@@ -380,17 +408,17 @@ public class EyesisCorrections {
System.out.println("User requested stop"); System.out.println("User requested stop");
return false; return false;
} }
} }
return true; return true;
} }
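The check above recomputes a channel's noise gains only when the kernel path recorded in the cached image's properties ("sharpKernelPath"/"smoothKernelPath") no longer matches the currently selected kernel file. A minimal sketch of that invalidation rule, with illustrative names:

class TaggedResult {
    final String sourcePath;   // kernel file this result was computed from
    final float[] data;
    TaggedResult(String sourcePath, float[] data) { this.sourcePath = sourcePath; this.data = data; }
}

class NoiseGainCacheDemo {
    static TaggedResult update(TaggedResult cached, String currentKernelPath) {
        if ((cached != null) && currentKernelPath.equals(cached.sourcePath)) {
            return cached;                                        // path unchanged - reuse the old result
        }
        float[] recomputed = new float[] { currentKernelPath.length() }; // stand-in for the real calculation
        return new TaggedResult(currentKernelPath, recomputed);
    }
}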
public void createChannelVignetting(){ public void createChannelVignetting(){
this.channelWidthHeight=new int [this.usedChannels.length][]; this.channelWidthHeight=new int [this.usedChannels.length][];
this.channelVignettingCorrection=new float [this.usedChannels.length][]; this.channelVignettingCorrection=new float [this.usedChannels.length][];
this.defectsXY=new int [this.usedChannels.length][][]; this.defectsXY=new int [this.usedChannels.length][][];
this.defectsDiff=new double [this.usedChannels.length][]; this.defectsDiff=new double [this.usedChannels.length][];
for (int nChn=0;nChn< this.usedChannels.length; nChn++){ for (int nChn=0;nChn< this.usedChannels.length; nChn++){
this.channelWidthHeight[nChn]=null; this.channelWidthHeight[nChn]=null;
this.channelVignettingCorrection[nChn]=null; this.channelVignettingCorrection[nChn]=null;
...@@ -444,7 +472,7 @@ public class EyesisCorrections { ...@@ -444,7 +472,7 @@ public class EyesisCorrections {
for (int i=0;i<this.usedChannels.length;i++) if (this.usedChannels[i]) { for (int i=0;i<this.usedChannels.length;i++) if (this.usedChannels[i]) {
System.out.println(i+": subCamera="+this.pixelMapping.sensors[i].subcamera); System.out.println(i+": subCamera="+this.pixelMapping.sensors[i].subcamera);
} }
} }
if (correctionsParameters.isJP4()) imp=JP4_INSTANCE.demuxImage(imp_composite, subChannel); if (correctionsParameters.isJP4()) imp=JP4_INSTANCE.demuxImage(imp_composite, subChannel);
if (imp==null) imp=imp_composite; // not a composite image if (imp==null) imp=imp_composite; // not a composite image
...@@ -478,14 +506,18 @@ public class EyesisCorrections { ...@@ -478,14 +506,18 @@ public class EyesisCorrections {
if (((i%numInLine)==(numInLine-1)) || (i == (this.defectsXY[srcChannel].length-1))) System.out.println(); if (((i%numInLine)==(numInLine-1)) || (i == (this.defectsXY[srcChannel].length-1))) System.out.println();
} }
} }
} }
} }
} }
} }
} }
} }
boolean [] usedChannels(String [] paths){ boolean [] usedChannels(String [] paths){
return usedChannels(paths, false);
}
boolean [] usedChannels(String [] paths, boolean missing_ok){
if (paths==null) paths=new String[0]; if (paths==null) paths=new String[0];
int numChannels=this.pixelMapping.getNumChannels(); int numChannels=this.pixelMapping.getNumChannels();
boolean [] usedChannels=new boolean[numChannels]; boolean [] usedChannels=new boolean[numChannels];
...@@ -498,13 +530,15 @@ public class EyesisCorrections { ...@@ -498,13 +530,15 @@ public class EyesisCorrections {
if (channels!=null) for (int j=0;j<channels.length;j++) usedChannels[channels[j]]=true; if (channels!=null) for (int j=0;j<channels.length;j++) usedChannels[channels[j]]=true;
} else { } else {
if (!this.pixelMapping.isChannelAvailable(srcChannel)){ if (!this.pixelMapping.isChannelAvailable(srcChannel)){
if (debugLevel>0) System.out.println("No sensor data for channel "+srcChannel+", needed for source file "+paths[i]); if ((debugLevel>0) && !missing_ok) {
System.out.println("No sensor data for channel "+srcChannel+", needed for source file "+paths[i]);
}
} else usedChannels[srcChannel] = true; } else usedChannels[srcChannel] = true;
} }
} }
return usedChannels; return usedChannels;
} }
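usedChannels() above marks every channel referenced by at least one source file; the missing_ok flag only suppresses the warning for channels that have no sensor calibration data. The marking itself reduces to a simple mask fill (illustrative sketch, not this class's code):

static boolean[] usedChannelMask(int numChannels, int[][] channelsPerFile) {
    boolean[] used = new boolean[numChannels];
    for (int[] fileChannels : channelsPerFile) {          // channels contributed by each source file
        if (fileChannels == null) continue;
        for (int chn : fileChannels) {
            if ((chn >= 0) && (chn < numChannels)) used[chn] = true;
        }
    }
    return used;
}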
public void testFF(String path){ public void testFF(String path){
ImagePlus imp=new ImagePlus(path); ImagePlus imp=new ImagePlus(path);
imp.getProcessor().resetMinAndMax(); // imp_psf will be reused imp.getProcessor().resetMinAndMax(); // imp_psf will be reused
...@@ -534,9 +568,9 @@ public class EyesisCorrections { ...@@ -534,9 +568,9 @@ public class EyesisCorrections {
SDFA_INSTANCE.showArrays(pixelsFlat, imp.getWidth(), imp.getHeight(), srcChannel+"-flat-"+imp.getTitle()); SDFA_INSTANCE.showArrays(pixelsFlat, imp.getWidth(), imp.getHeight(), srcChannel+"-flat-"+imp.getTitle());
} }
} }
public boolean isChannelEnabled(int channel){ public boolean isChannelEnabled(int channel){
return ((channel>=0) && (channel<this.usedChannels.length) && this.usedChannels[channel]); return ((channel>=0) && (channel<this.usedChannels.length) && this.usedChannels[channel]);
} }
public void processChannelImages( public void processChannelImages(
EyesisCorrectionParameters.SplitParameters splitParameters, EyesisCorrectionParameters.SplitParameters splitParameters,
...@@ -547,7 +581,7 @@ public class EyesisCorrections { ...@@ -547,7 +581,7 @@ public class EyesisCorrections {
EyesisCorrectionParameters.RGBParameters rgbParameters, EyesisCorrectionParameters.RGBParameters rgbParameters,
EyesisCorrectionParameters.EquirectangularParameters equirectangularParameters, EyesisCorrectionParameters.EquirectangularParameters equirectangularParameters,
int convolveFFTSize, // 128 - fft size, kernel size should be size/2 int convolveFFTSize, // 128 - fft size, kernel size should be size/2
final int threadsMax, // maximal number of threads to launch final int threadsMax, // maximal number of threads to launch
final boolean updateStatus, final boolean updateStatus,
final int debugLevel){ final int debugLevel){
this.startTime=System.nanoTime(); this.startTime=System.nanoTime();
...@@ -619,9 +653,9 @@ public class EyesisCorrections { ...@@ -619,9 +653,9 @@ public class EyesisCorrections {
false); // do not show false); // do not show
imp_src=JP4_INSTANCE.demuxImage(imp_composite, subchannel); imp_src=JP4_INSTANCE.demuxImage(imp_composite, subchannel);
if (imp_src==null) imp_src=imp_composite; // not a composite image if (imp_src==null) imp_src=imp_composite; // not a composite image
// do we need to add any properties? // do we need to add any properties?
} else { } else {
imp_src=new ImagePlus(sourceFiles[nFile]); imp_src=new ImagePlus(sourceFiles[nFile]);
// (new JP46_Reader_camera(false)).decodeProperiesFromInfo(imp_src); // decode existing properties from info // (new JP46_Reader_camera(false)).decodeProperiesFromInfo(imp_src); // decode existing properties from info
JP4_INSTANCE.decodeProperiesFromInfo(imp_src); // decode existing properties from info JP4_INSTANCE.decodeProperiesFromInfo(imp_src); // decode existing properties from info
...@@ -647,7 +681,7 @@ public class EyesisCorrections { ...@@ -647,7 +681,7 @@ public class EyesisCorrections {
rgbParameters, rgbParameters,
convolveFFTSize, // 128 - fft size, kernel size should be size/2 convolveFFTSize, // 128 - fft size, kernel size should be size/2
scaleExposure, scaleExposure,
threadsMax, // maximal number of threads to launch threadsMax, // maximal number of threads to launch
updateStatus, updateStatus,
debugLevel); debugLevel);
// warp result (add support for different color modes) // warp result (add support for different color modes)
...@@ -665,7 +699,7 @@ public class EyesisCorrections { ...@@ -665,7 +699,7 @@ public class EyesisCorrections {
} }
} }
} }
public void saveTiffWithAlpha( public void saveTiffWithAlpha(
ImagePlus imp, ImagePlus imp,
EyesisCorrectionParameters.CorrectionParameters correctionsParameters) EyesisCorrectionParameters.CorrectionParameters correctionsParameters)
...@@ -675,7 +709,7 @@ public class EyesisCorrections { ...@@ -675,7 +709,7 @@ public class EyesisCorrections {
if (cutTile){ if (cutTile){
fullWidth=Integer.parseInt((String) imp.getProperty("ImageFullWidth")); fullWidth=Integer.parseInt((String) imp.getProperty("ImageFullWidth"));
x0= Integer.parseInt((String) imp.getProperty("XPosition")); x0= Integer.parseInt((String) imp.getProperty("XPosition"));
cutTile=(x0+imp.getWidth()>fullWidth) ; cutTile=(x0+imp.getWidth()>fullWidth) ;
} }
if (cutTile ) { if (cutTile ) {
if (this.debugLevel>0) System.out.println("Cutting result image in two parts to prevent roll-over"); if (this.debugLevel>0) System.out.println("Cutting result image in two parts to prevent roll-over");
...@@ -698,7 +732,7 @@ public class EyesisCorrections { ...@@ -698,7 +732,7 @@ public class EyesisCorrections {
path, path,
correctionsParameters.equirectangularFormat, correctionsParameters.equirectangularFormat,
((correctionsParameters.equirectangularFormat==3)?correctionsParameters.outputRangeFP:correctionsParameters.outputRangeInt), ((correctionsParameters.equirectangularFormat==3)?correctionsParameters.outputRangeFP:correctionsParameters.outputRangeInt),
correctionsParameters.imageJTags, correctionsParameters.imageJTags,
debugLevel); debugLevel);
} }
} }
...@@ -748,8 +782,8 @@ public class EyesisCorrections { ...@@ -748,8 +782,8 @@ public class EyesisCorrections {
(new JP46_Reader_camera(false)).encodeProperiesToInfo(imp_croped); (new JP46_Reader_camera(false)).encodeProperiesToInfo(imp_croped);
return imp_croped; return imp_croped;
} }
public ImagePlus applyEquirectangular( public ImagePlus applyEquirectangular(
int channel, int channel,
ImagePlus imp, ImagePlus imp,
...@@ -777,7 +811,7 @@ public class EyesisCorrections { ...@@ -777,7 +811,7 @@ public class EyesisCorrections {
pixelMapping.loadChannelEquirectangularMap( pixelMapping.loadChannelEquirectangularMap(
channel, channel,
path); path);
if (!this.pixelMapping.isEquirectangularMapAvailable(channel)){ if (!this.pixelMapping.isEquirectangularMapAvailable(channel)){
String msg="Failed to load equirectangular map for channel "+channel; String msg="Failed to load equirectangular map for channel "+channel;
System.out.println("Error "+msg); System.out.println("Error "+msg);
...@@ -785,7 +819,7 @@ public class EyesisCorrections { ...@@ -785,7 +819,7 @@ public class EyesisCorrections {
return null; return null;
} }
} }
// apply warping here // apply warping here
// double sourceImageScale=2.0*this.correctionsParameters.JPEG_scale; // double sourceImageScale=2.0*this.correctionsParameters.JPEG_scale;
int sourceImageScale=2; // *this.correctionsParameters.JPEG_scale; int sourceImageScale=2; // *this.correctionsParameters.JPEG_scale;
ImagePlus imp_warped= pixelMapping.resampleToEquirectangular( // will Add "_EQR" ImagePlus imp_warped= pixelMapping.resampleToEquirectangular( // will Add "_EQR"
...@@ -823,7 +857,7 @@ public class EyesisCorrections { ...@@ -823,7 +857,7 @@ public class EyesisCorrections {
// channel, // channel,
path, path,
debugLevel); debugLevel);
if (!this.pixelMapping.isPlaneMapMapAvailable(channel)){ if (!this.pixelMapping.isPlaneMapMapAvailable(channel)){
String msg="Failed to load a common plane projection map for channel "+channel+", or that file does not have this sensor data"; String msg="Failed to load a common plane projection map for channel "+channel+", or that file does not have this sensor data";
System.out.println("Error "+msg); System.out.println("Error "+msg);
...@@ -831,7 +865,7 @@ public class EyesisCorrections { ...@@ -831,7 +865,7 @@ public class EyesisCorrections {
return null; return null;
} }
} }
// apply warping here // apply warping here
// double sourceImageScale=2.0*this.correctionsParameters.JPEG_scale; // double sourceImageScale=2.0*this.correctionsParameters.JPEG_scale;
int sourceImageScale=2; // *this.correctionsParameters.JPEG_scale; int sourceImageScale=2; // *this.correctionsParameters.JPEG_scale;
ImagePlus imp_warped= pixelMapping.applyPlaneMap( ImagePlus imp_warped= pixelMapping.applyPlaneMap(
...@@ -876,8 +910,8 @@ public class EyesisCorrections { ...@@ -876,8 +910,8 @@ public class EyesisCorrections {
} }
return numApplied; return numApplied;
} }
public ImagePlus processChannelImage( public ImagePlus processChannelImage(
ImagePlus imp_src, // should have properties "name"(base for saving results), "channel","path" ImagePlus imp_src, // should have properties "name"(base for saving results), "channel","path"
EyesisCorrectionParameters.SplitParameters splitParameters, EyesisCorrectionParameters.SplitParameters splitParameters,
...@@ -888,14 +922,14 @@ public class EyesisCorrections { ...@@ -888,14 +922,14 @@ public class EyesisCorrections {
EyesisCorrectionParameters.RGBParameters rgbParameters, EyesisCorrectionParameters.RGBParameters rgbParameters,
int convolveFFTSize, // 128 - fft size, kernel size should be size/2 int convolveFFTSize, // 128 - fft size, kernel size should be size/2
double scaleExposure, double scaleExposure,
final int threadsMax, // maximal number of threads to launch final int threadsMax, // maximal number of threads to launch
final boolean updateStatus, final boolean updateStatus,
final int debugLevel){ final int debugLevel){
boolean advanced=this.correctionsParameters.zcorrect || this.correctionsParameters.equirectangular; boolean advanced=this.correctionsParameters.zcorrect || this.correctionsParameters.equirectangular;
boolean crop= advanced? true: this.correctionsParameters.crop; boolean crop= advanced? true: this.correctionsParameters.crop;
boolean rotate= advanced? false: this.correctionsParameters.rotate; boolean rotate= advanced? false: this.correctionsParameters.rotate;
double JPEG_scale= advanced? 1.0: this.correctionsParameters.JPEG_scale; double JPEG_scale= advanced? 1.0: this.correctionsParameters.JPEG_scale;
boolean toRGB= advanced? true: this.correctionsParameters.toRGB; boolean toRGB= advanced? true: this.correctionsParameters.toRGB;
// may use this.startTime to report intermediate steps execution times // may use this.startTime to report intermediate steps execution times
String name=(String) imp_src.getProperty("name"); String name=(String) imp_src.getProperty("name");
...@@ -934,13 +968,13 @@ public class EyesisCorrections { ...@@ -934,13 +968,13 @@ public class EyesisCorrections {
saveAndShow(result, this.correctionsParameters); saveAndShow(result, this.correctionsParameters);
return result; return result;
} }
// Split into Bayer components, oversample, increase canvas // Split into Bayer components, oversample, increase canvas
ImageStack stack= bayerToStack( ImageStack stack= bayerToStack(
result, // source Bayer image, linearized, 32-bit (float)) result, // source Bayer image, linearized, 32-bit (float))
splitParameters); splitParameters);
String titleFull=title+"-SPLIT"; String titleFull=title+"-SPLIT";
if (!this.correctionsParameters.debayer) { if (!this.correctionsParameters.debayer) {
result= new ImagePlus(titleFull, stack); result= new ImagePlus(titleFull, stack);
saveAndShow(result, this.correctionsParameters); saveAndShow(result, this.correctionsParameters);
return result; return result;
} }
...@@ -972,7 +1006,7 @@ public class EyesisCorrections { ...@@ -972,7 +1006,7 @@ public class EyesisCorrections {
result= new ImagePlus(titleFull, stack); result= new ImagePlus(titleFull, stack);
if (this.correctionsParameters.deconvolve) { if (this.correctionsParameters.deconvolve) {
//Ask for the kernel directory if it is undefined //Ask for the kernel directory if it is undefined
if (this.sharpKernelPaths==null){ // make sure the paths list is reset after changing parameters if (this.sharpKernelPaths==null){ // make sure the paths list is reset after changing parameters
this.sharpKernelPaths=correctionsParameters.selectKernelChannelFiles( this.sharpKernelPaths=correctionsParameters.selectKernelChannelFiles(
0, // 0 - sharp, 1 - smooth 0, // 0 - sharp, 1 - smooth
this.usedChannels.length, // number of channels this.usedChannels.length, // number of channels
...@@ -993,7 +1027,7 @@ public class EyesisCorrections { ...@@ -993,7 +1027,7 @@ public class EyesisCorrections {
ImageStack stackDeconvolvedSharp= convolveStackWithKernelStack( // stack_d ImageStack stackDeconvolvedSharp= convolveStackWithKernelStack( // stack_d
stack, // stack with 3 colors/slices with the image stack, // stack with 3 colors/slices with the image
convolutionSharpKernelStack, // stack with 3 colors/slices convolution kernels convolutionSharpKernelStack, // stack with 3 colors/slices convolution kernels
convolveFFTSize, // 128 - fft size, kernel size should be size/2 convolveFFTSize, // 128 - fft size, kernel size should be size/2
threadsMax, threadsMax,
updateStatus, // update status info updateStatus, // update status info
debugLevel); debugLevel);
...@@ -1003,7 +1037,7 @@ public class EyesisCorrections { ...@@ -1003,7 +1037,7 @@ public class EyesisCorrections {
titleFull=title+"-DECONV"; titleFull=title+"-DECONV";
if (this.correctionsParameters.combine) { if (this.correctionsParameters.combine) {
// Read "smooth" kernels // Read "smooth" kernels
if (this.smoothKernelPaths==null){ // make sure the paths list is reset after changing parameters if (this.smoothKernelPaths==null){ // make sure the paths list is reset after changing parameters
this.smoothKernelPaths=correctionsParameters.selectKernelChannelFiles( this.smoothKernelPaths=correctionsParameters.selectKernelChannelFiles(
1, // 0 - sharp, 1 - smooth 1, // 0 - sharp, 1 - smooth
this.usedChannels.length, // number of channels this.usedChannels.length, // number of channels
...@@ -1023,7 +1057,7 @@ public class EyesisCorrections { ...@@ -1023,7 +1057,7 @@ public class EyesisCorrections {
ImageStack stackDeconvolvedSmooth = convolveStackWithKernelStack( //stack_g ImageStack stackDeconvolvedSmooth = convolveStackWithKernelStack( //stack_g
stack, // stack with 3 colors/slices with the image stack, // stack with 3 colors/slices with the image
convolutionSmoothKernelStack, // stack with 3 colors/slices convolution kernels convolutionSmoothKernelStack, // stack with 3 colors/slices convolution kernels
convolveFFTSize, // 128 - fft size, kernel size should be size/2 convolveFFTSize, // 128 - fft size, kernel size should be size/2
threadsMax, threadsMax,
updateStatus, // update status info updateStatus, // update status info
debugLevel); debugLevel);
...@@ -1040,7 +1074,7 @@ public class EyesisCorrections { ...@@ -1040,7 +1074,7 @@ public class EyesisCorrections {
nonlinParameters.noiseGainPower nonlinParameters.noiseGainPower
); );
// show noise mask here? // show noise mask here?
nonlinParameters.showMask=this.correctionsParameters.showDenoiseMask; nonlinParameters.showMask=this.correctionsParameters.showDenoiseMask;
// if (DEBUG_LEVEL>1) System.out.println ( " noiseMask.length="+((noiseMask==null)?"null":(noiseMask.length+" noiseMask[0].length="+noiseMask[0].length))); // if (DEBUG_LEVEL>1) System.out.println ( " noiseMask.length="+((noiseMask==null)?"null":(noiseMask.length+" noiseMask[0].length="+noiseMask[0].length)));
// CorrectionDenoise correctionDenoise=new CorrectionDenoise(stopRequested); // CorrectionDenoise correctionDenoise=new CorrectionDenoise(stopRequested);
...@@ -1066,7 +1100,7 @@ public class EyesisCorrections { ...@@ -1066,7 +1100,7 @@ public class EyesisCorrections {
if (this.correctionsParameters.crop){ if (this.correctionsParameters.crop){
denoiseMask=cropImage32(denoiseMask,splitParameters); denoiseMask=cropImage32(denoiseMask,splitParameters);
} }
//rotate the result //rotate the result
if (this.correctionsParameters.rotate){ if (this.correctionsParameters.rotate){
denoiseMask=rotateImage32CW(denoiseMask); denoiseMask=rotateImage32CW(denoiseMask);
} }
...@@ -1078,7 +1112,7 @@ public class EyesisCorrections { ...@@ -1078,7 +1112,7 @@ public class EyesisCorrections {
denoiseMask= new ImagePlus(denoiseMask.getTitle(),ip); denoiseMask= new ImagePlus(denoiseMask.getTitle(),ip);
denoiseMask.updateAndDraw(); denoiseMask.updateAndDraw();
} }
if (this.correctionsParameters.showDenoiseMask) denoiseMask.show(); if (this.correctionsParameters.showDenoiseMask) denoiseMask.show();
//public ImagePlus Image32toGreyRGB24(ImagePlus imp); //public ImagePlus Image32toGreyRGB24(ImagePlus imp);
if (this.correctionsParameters.saveDenoiseMask) { if (this.correctionsParameters.saveDenoiseMask) {
ImagePlus denoiseMaskRGB24=Image32toGreyRGB24(denoiseMask); ImagePlus denoiseMaskRGB24=Image32toGreyRGB24(denoiseMask);
...@@ -1104,7 +1138,7 @@ public class EyesisCorrections { ...@@ -1104,7 +1138,7 @@ public class EyesisCorrections {
} else if (this.correctionsParameters.combine) { // "combine" w/o "deconvolve" - just use convolution with smooth kernels } else if (this.correctionsParameters.combine) { // "combine" w/o "deconvolve" - just use convolution with smooth kernels
// Read smooth kernels // Read smooth kernels
// Read "smooth" kernels // Read "smooth" kernels
if (this.smoothKernelPaths==null){ // make sure the paths list is reset after changing parameters if (this.smoothKernelPaths==null){ // make sure the paths list is reset after changing parameters
this.smoothKernelPaths=correctionsParameters.selectKernelChannelFiles( this.smoothKernelPaths=correctionsParameters.selectKernelChannelFiles(
1, // 0 - sharp, 1 - smooth 1, // 0 - sharp, 1 - smooth
this.usedChannels.length, // number of channels this.usedChannels.length, // number of channels
...@@ -1124,7 +1158,7 @@ public class EyesisCorrections { ...@@ -1124,7 +1158,7 @@ public class EyesisCorrections {
ImageStack stackDeconvolvedSmooth = convolveStackWithKernelStack( // stack_g ImageStack stackDeconvolvedSmooth = convolveStackWithKernelStack( // stack_g
stack, // stack with 3 colors/slices with the image stack, // stack with 3 colors/slices with the image
convolutionSmoothKernelStack, // stack with 3 colors/slices convolution kernels convolutionSmoothKernelStack, // stack with 3 colors/slices convolution kernels
convolveFFTSize, // 128 - fft size, kernel size should be size/2 convolveFFTSize, // 128 - fft size, kernel size should be size/2
threadsMax, threadsMax,
updateStatus, // update status info updateStatus, // update status info
debugLevel); debugLevel);
...@@ -1134,9 +1168,9 @@ public class EyesisCorrections { ...@@ -1134,9 +1168,9 @@ public class EyesisCorrections {
Runtime.getRuntime().gc(); Runtime.getRuntime().gc();
titleFull=title+"-LOWRES"; titleFull=title+"-LOWRES";
}// end of if (this.correctionsParameters.deconvolve) }// end of if (this.correctionsParameters.deconvolve)
//stack now has the result, titleFull - correct title for the image //stack now has the result, titleFull - correct title for the image
if (!this.correctionsParameters.colorProc){ if (!this.correctionsParameters.colorProc){
result= new ImagePlus(titleFull, stack); result= new ImagePlus(titleFull, stack);
saveAndShow( saveAndShow(
result, result,
this.correctionsParameters); this.correctionsParameters);
...@@ -1156,7 +1190,7 @@ public class EyesisCorrections { ...@@ -1156,7 +1190,7 @@ public class EyesisCorrections {
imp_dbg, imp_dbg,
this.correctionsParameters); this.correctionsParameters);
} }
correctionColorProc.processColorsWeights(stack, correctionColorProc.processColorsWeights(stack,
// 255.0/this.psfSubpixelShouldBe4/this.psfSubpixelShouldBe4, // double scale, // initial maximal pixel value (16)) // 255.0/this.psfSubpixelShouldBe4/this.psfSubpixelShouldBe4, // double scale, // initial maximal pixel value (16))
255.0/this.psfSubpixelShouldBe4/this.psfSubpixelShouldBe4/scaleExposure, // double scale, // initial maximal pixel value (16)) 255.0/this.psfSubpixelShouldBe4/this.psfSubpixelShouldBe4/scaleExposure, // double scale, // initial maximal pixel value (16))
...@@ -1174,7 +1208,7 @@ public class EyesisCorrections { ...@@ -1174,7 +1208,7 @@ public class EyesisCorrections {
this.correctionsParameters); this.correctionsParameters);
} }
// Show/save color denoise mask // Show/save color denoise mask
if ((this.correctionsParameters.saveChromaDenoiseMask || this.correctionsParameters.showChromaDenoiseMask) && (correctionColorProc.getDenoiseMaskChroma()!=null)) { if ((this.correctionsParameters.saveChromaDenoiseMask || this.correctionsParameters.showChromaDenoiseMask) && (correctionColorProc.getDenoiseMaskChroma()!=null)) {
ImagePlus chromaDenoiseMask=SDFA_INSTANCE.makeArrays (correctionColorProc.getDenoiseMaskChroma(), ImagePlus chromaDenoiseMask=SDFA_INSTANCE.makeArrays (correctionColorProc.getDenoiseMaskChroma(),
correctionColorProc.getDenoiseMaskChromaWidth(), correctionColorProc.getDenoiseMaskChromaWidth(),
...@@ -1185,7 +1219,7 @@ public class EyesisCorrections { ...@@ -1185,7 +1219,7 @@ public class EyesisCorrections {
if (this.correctionsParameters.crop){ if (this.correctionsParameters.crop){
chromaDenoiseMask=cropImage32(chromaDenoiseMask,splitParameters); chromaDenoiseMask=cropImage32(chromaDenoiseMask,splitParameters);
} }
//rotate the result //rotate the result
if (this.correctionsParameters.rotate){ if (this.correctionsParameters.rotate){
chromaDenoiseMask=rotateImage32CW(chromaDenoiseMask); chromaDenoiseMask=rotateImage32CW(chromaDenoiseMask);
} }
...@@ -1197,7 +1231,7 @@ public class EyesisCorrections { ...@@ -1197,7 +1231,7 @@ public class EyesisCorrections {
chromaDenoiseMask= new ImagePlus(chromaDenoiseMask.getTitle(),ip); chromaDenoiseMask= new ImagePlus(chromaDenoiseMask.getTitle(),ip);
chromaDenoiseMask.updateAndDraw(); chromaDenoiseMask.updateAndDraw();
} }
if (this.correctionsParameters.showChromaDenoiseMask) chromaDenoiseMask.show(); if (this.correctionsParameters.showChromaDenoiseMask) chromaDenoiseMask.show();
//public ImagePlus Image32toGreyRGB24(ImagePlus imp); //public ImagePlus Image32toGreyRGB24(ImagePlus imp);
if (this.correctionsParameters.saveChromaDenoiseMask) { if (this.correctionsParameters.saveChromaDenoiseMask) {
ImagePlus chromaDenoiseMaskRGB24=Image32toGreyRGB24(chromaDenoiseMask); ImagePlus chromaDenoiseMaskRGB24=Image32toGreyRGB24(chromaDenoiseMask);
...@@ -1241,7 +1275,7 @@ public class EyesisCorrections { ...@@ -1241,7 +1275,7 @@ public class EyesisCorrections {
titleFull=title+"-YPrPb"; // including "-DECONV" or "-COMBO" titleFull=title+"-YPrPb"; // including "-DECONV" or "-COMBO"
if (debugLevel>1) System.out.println("Using full stack, including YPbPr"); if (debugLevel>1) System.out.println("Using full stack, including YPbPr");
} }
result= new ImagePlus(titleFull, stack); result= new ImagePlus(titleFull, stack);
// Crop image to match original one (scaled to oversampling) // Crop image to match original one (scaled to oversampling)
if (crop){ // always crop if equirectangular if (crop){ // always crop if equirectangular
stack=cropStack32(stack,splitParameters); stack=cropStack32(stack,splitParameters);
...@@ -1253,7 +1287,7 @@ public class EyesisCorrections { ...@@ -1253,7 +1287,7 @@ public class EyesisCorrections {
} }
} }
// rotate the result // rotate the result
if (rotate){ // never rotate for equirectangular if (rotate){ // never rotate for equirectangular
stack=rotateStack32CW(stack); stack=rotateStack32CW(stack);
} }
...@@ -1268,7 +1302,7 @@ public class EyesisCorrections { ...@@ -1268,7 +1302,7 @@ public class EyesisCorrections {
if (this.correctionsParameters.equirectangularFormat==0){ if (this.correctionsParameters.equirectangularFormat==0){
stack=convertRGB32toRGB16Stack( stack=convertRGB32toRGB16Stack(
stack, stack,
rgbParameters); rgbParameters);
titleFull=title+"-RGB48"; titleFull=title+"-RGB48";
result= new ImagePlus(titleFull, stack); result= new ImagePlus(titleFull, stack);
...@@ -1348,7 +1382,7 @@ public class EyesisCorrections { ...@@ -1348,7 +1382,7 @@ public class EyesisCorrections {
this.correctionsParameters, this.correctionsParameters,
this.correctionsParameters.save, this.correctionsParameters.save,
this.correctionsParameters.show, this.correctionsParameters.show,
this.correctionsParameters.JPEG_quality); this.correctionsParameters.JPEG_quality);
} else { } else {
if (this.correctionsParameters.equirectangularFormat<4){ if (this.correctionsParameters.equirectangularFormat<4){
...@@ -1377,10 +1411,10 @@ public class EyesisCorrections { ...@@ -1377,10 +1411,10 @@ public class EyesisCorrections {
} }
return result; return result;
} }
/* ======================================================================== */ /* ======================================================================== */
// private boolean fixSliceSequence ( // private boolean fixSliceSequence (
public boolean fixSliceSequence ( // for EyesisDCT public boolean fixSliceSequence ( // for EyesisDCT
ImageStack stack, ImageStack stack,
...@@ -1433,10 +1467,10 @@ public class EyesisCorrections { ...@@ -1433,10 +1467,10 @@ public class EyesisCorrections {
stack.setPixels (stack.getPixels(slice2), slice1); stack.setPixels (stack.getPixels(slice2), slice1);
stack.setPixels (pixels, slice2); stack.setPixels (pixels, slice2);
} }
/* ======================================================================== */ /* ======================================================================== */
public ImageStack cropStack32( public ImageStack cropStack32(
...@@ -1483,7 +1517,7 @@ public class EyesisCorrections { ...@@ -1483,7 +1517,7 @@ public class EyesisCorrections {
stack_rot.addSlice(stack.getSliceLabel(i+1), opixels); stack_rot.addSlice(stack.getSliceLabel(i+1), opixels);
} }
return stack_rot; return stack_rot;
} }
/* ======================================================================== */ /* ======================================================================== */
public ImagePlus cropImage32( public ImagePlus cropImage32(
...@@ -1564,9 +1598,9 @@ public class EyesisCorrections { ...@@ -1564,9 +1598,9 @@ public class EyesisCorrections {
stack16.addSlice(stack32.getSliceLabel(i+1), spixels); stack16.addSlice(stack32.getSliceLabel(i+1), spixels);
} }
return stack16; return stack16;
} }
public ImagePlus convertRGB48toRGB24( public ImagePlus convertRGB48toRGB24(
ImageStack stack16, ImageStack stack16,
String title, String title,
...@@ -1586,11 +1620,11 @@ public class EyesisCorrections { ...@@ -1586,11 +1620,11 @@ public class EyesisCorrections {
if (numSlices > 4) numSlices = 4; if (numSlices > 4) numSlices = 4;
short [][] spixels=new short[numSlices][]; short [][] spixels=new short[numSlices][];
int [] sliceSeq = new int [numSlices]; int [] sliceSeq = new int [numSlices];
for (int j = 0; j < numSlices; j++) sliceSeq[j] = (j + ((numSlices > 3)? 3:0)) % 4; for (int j = 0; j < numSlices; j++) sliceSeq[j] = (j + ((numSlices > 3)? 3:0)) % 4;
int [] pixels=new int[length]; int [] pixels=new int[length];
int c,d; int c,d;
double [] scale=new double[numSlices]; double [] scale=new double[numSlices];
for (c = 0; c < numSlices; c++) { for (c = 0; c < numSlices; c++) {
scale[c]=256.0/(maxs[c]-mins[c]); scale[c]=256.0/(maxs[c]-mins[c]);
...@@ -1614,7 +1648,7 @@ public class EyesisCorrections { ...@@ -1614,7 +1648,7 @@ public class EyesisCorrections {
public ImageStack convertRGB48toRGBA24Stack( public ImageStack convertRGB48toRGBA24Stack(
ImageStack stack16, ImageStack stack16,
double [] dalpha, // alpha pixel array 0..1.0 or null double [] dalpha, // alpha pixel array 0..1.0 or null
// String title, // String title,
int r_min, int r_min,
int r_max, int r_max,
...@@ -1652,7 +1686,7 @@ public class EyesisCorrections { ...@@ -1652,7 +1686,7 @@ public class EyesisCorrections {
} }
return stack8; return stack8;
} }
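The r_min/r_max style parameters above suggest a per-channel linear mapping from the 16-bit range to 8 bits; a sketch of that mapping (the truncated body may differ in details):

static int to8bit(int value16, int min, int max) {
    int v = (int) Math.round(255.0 * (value16 - min) / (double) (max - min));
    if (v < 0) v = 0;          // clamp below the selected range
    if (v > 255) v = 255;      // clamp above it
    return v;
}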
/* ======================================================================== */ /* ======================================================================== */
public ImagePlus Image32toGreyRGB24( public ImagePlus Image32toGreyRGB24(
ImagePlus imp){ ImagePlus imp){
...@@ -1681,10 +1715,10 @@ public class EyesisCorrections { ...@@ -1681,10 +1715,10 @@ public class EyesisCorrections {
} }
/* ======================================================================== */ /* ======================================================================== */
/* Combine 2 stacks and a mask */ /* Combine 2 stacks and a mask */
public ImageStack combineStacksWithMask (ImageStack stack_bg, public ImageStack combineStacksWithMask (ImageStack stack_bg,
ImageStack stack_fg, ImageStack stack_fg,
// float [] mask ) { // float [] mask ) {
double [] mask ) { double [] mask ) {
...@@ -1703,10 +1737,10 @@ public class EyesisCorrections { ...@@ -1703,10 +1737,10 @@ public class EyesisCorrections {
return stack; return stack;
} }
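With the body truncated here, the signature still shows the intent: each output pixel blends the background and foreground stacks by the mask value. A per-pixel sketch under that assumption (mask 0 keeps the background, 1 the foreground):

static float blend(float bg, float fg, double mask) {
    return (float) (bg * (1.0 - mask) + fg * mask);
}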
/* ======================================================================== */ /* ======================================================================== */
public double [] getSlidingMask(int size) { // duplicate with DebayerScissors public double [] getSlidingMask(int size) { // duplicate with DebayerScissors
double [] mask = new double [size*size]; double [] mask = new double [size*size];
...@@ -1720,17 +1754,17 @@ public class EyesisCorrections { ...@@ -1720,17 +1754,17 @@ public class EyesisCorrections {
} }
/* ======================================================================== */ /* ======================================================================== */
/* Convolve the image stack with the kernel stack using FHT. Kernels should be (size/2)*(size/2) - currently 64x64; the image is split into /* Convolve the image stack with the kernel stack using FHT. Kernels should be (size/2)*(size/2) - currently 64x64; the image is split into
(size/2)*(size/2) segments overlapping with a step of size/4. Both are zero-padded to size x size, so the convolution result does not roll over, and (size/2)*(size/2) segments overlapping with a step of size/4. Both are zero-padded to size x size, so the convolution result does not roll over, and
the processed size x size (currently 128x128) result arrays are accumulated in the output stack. the processed size x size (currently 128x128) result arrays are accumulated in the output stack.
The input image should be extended by size/4 in each direction (and the kernel arrays should match it) to minimize border effects. */ The input image should be extended by size/4 in each direction (and the kernel arrays should match it) to minimize border effects. */
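A geometry-only sketch of the tiling described above, with illustrative names and no FHT: tiles of size/2 are taken every size/4 pixels, each would be zero-padded to size x size and convolved, and the overlapping results are summed back into the output.

static float[] overlapAddTiles(float[] img, int width, int height, int size) {
    int step = size / 4;                    // tile stride
    int tile = size / 2;                    // tile side, e.g. 64 when size = 128
    float[] out = new float[img.length];
    for (int y0 = 0; y0 + tile <= height; y0 += step) {
        for (int x0 = 0; x0 + tile <= width; x0 += step) {
            // here the tile would be windowed, zero-padded to size x size,
            // FHT-convolved with the matching kernel, and trimmed back to tile x tile
            for (int dy = 0; dy < tile; dy++) {
                for (int dx = 0; dx < tile; dx++) {
                    out[(y0 + dy) * width + (x0 + dx)] += img[(y0 + dy) * width + (x0 + dx)];
                }
            }
        }
    }
    return out;
}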
/* ======================================================================== */ /* ======================================================================== */
public ImageStack convolveStackWithKernelStack ( public ImageStack convolveStackWithKernelStack (
final ImageStack imageStack, // stack with 3 colors/slices with the image final ImageStack imageStack, // stack with 3 colors/slices with the image
final ImageStack kernelStack, // stack with 3 colors/slices convolution kernels final ImageStack kernelStack, // stack with 3 colors/slices convolution kernels
final int size, // 128 - fft size, kernel size should be size/2 final int size, // 128 - fft size, kernel size should be size/2
final int threadsMax, // maximal number of threads to launch final int threadsMax, // maximal number of threads to launch
final boolean updateStatus, // update status info final boolean updateStatus, // update status info
final int globalDebugLevel) final int globalDebugLevel)
{ {
...@@ -1755,7 +1789,7 @@ public class EyesisCorrections { ...@@ -1755,7 +1789,7 @@ public class EyesisCorrections {
final AtomicInteger ai = new AtomicInteger(0); final AtomicInteger ai = new AtomicInteger(0);
final int numberOfKernels= tilesY*tilesX*nChn; final int numberOfKernels= tilesY*tilesX*nChn;
final int numberOfKernelsInChn=tilesY*tilesX; final int numberOfKernelsInChn=tilesY*tilesX;
int ichn,indx,dx,dy,tx,ty,li; int ichn,indx,dx,dy,tx,ty,li;
final int [] nonOverlapSeq = new int[numberOfKernels]; final int [] nonOverlapSeq = new int[numberOfKernels];
int [] nextFirstFindex=new int[16*nChn]; int [] nextFirstFindex=new int[16*nChn];
...@@ -1769,8 +1803,8 @@ public class EyesisCorrections { ...@@ -1769,8 +1803,8 @@ public class EyesisCorrections {
} }
final AtomicInteger aStopIndex = new AtomicInteger(0); final AtomicInteger aStopIndex = new AtomicInteger(0);
final AtomicInteger tilesFinishedAtomic = new AtomicInteger(1); // first finished will be 1 final AtomicInteger tilesFinishedAtomic = new AtomicInteger(1); // first finished will be 1
if (globalDebugLevel>1) if (globalDebugLevel>1)
System.out.println("Eyesis_Corrections:convolveStackWithKernelStack :\n"+ System.out.println("Eyesis_Corrections:convolveStackWithKernelStack :\n"+
"globalDebugLevel="+globalDebugLevel+"\n"+ "globalDebugLevel="+globalDebugLevel+"\n"+
"imgWidth="+imgWidth+"\n"+ "imgWidth="+imgWidth+"\n"+
...@@ -1784,7 +1818,7 @@ public class EyesisCorrections { ...@@ -1784,7 +1818,7 @@ public class EyesisCorrections {
"kernelWidth="+kernelWidth+"\n"+ "kernelWidth="+kernelWidth+"\n"+
"kernelNumHor="+kernelNumHor+"\n"+ "kernelNumHor="+kernelNumHor+"\n"+
"numberOfKernelsInChn="+numberOfKernelsInChn+"\n"); "numberOfKernelsInChn="+numberOfKernelsInChn+"\n");
if (updateStatus) IJ.showStatus("Convolving image with kernels, "+nChn+" channels, "+tilesY+" rows"); if (updateStatus) IJ.showStatus("Convolving image with kernels, "+nChn+" channels, "+tilesY+" rows");
final long startTime = System.nanoTime(); final long startTime = System.nanoTime();
for (li = 0; li < nextFirstFindex.length; li++){ for (li = 0; li < nextFirstFindex.length; li++){
...@@ -1795,7 +1829,8 @@ public class EyesisCorrections { ...@@ -1795,7 +1829,8 @@ public class EyesisCorrections {
// System.out.println("\n=== nextFirstFindex["+li+"] =" + nextFirstFindex[li]+" === "); // System.out.println("\n=== nextFirstFindex["+li+"] =" + nextFirstFindex[li]+" === ");
for (int ithread = 0; ithread < threads.length; ithread++) { for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() { threads[ithread] = new Thread() {
public void run() { @Override
public void run() {
float [] pixels=null; // will be initialized at first use float [] pixels=null; // will be initialized at first use
float [] kernelPixels=null; // will be initialized at first use float [] kernelPixels=null; // will be initialized at first use
double [] kernel= new double[kernelSize*kernelSize]; double [] kernel= new double[kernelSize*kernelSize];
...@@ -1838,7 +1873,7 @@ public class EyesisCorrections { ...@@ -1838,7 +1873,7 @@ public class EyesisCorrections {
fht_instance.swapQuadrants(outTile); fht_instance.swapQuadrants(outTile);
fht_instance.transform( outTile); fht_instance.transform( outTile);
/* read convolution kernel */ /* read convolution kernel */
extractOneKernel(kernelPixels, // array of combined square kernels, each extractOneKernel(kernelPixels, // array of combined square kernels, each
kernel, // will be filled, should have correct size before call kernel, // will be filled, should have correct size before call
kernelNumHor, // number of kernels in a row kernelNumHor, // number of kernels in a row
//tileX*kernelSize, // horizontal number of kernel to extract //tileX*kernelSize, // horizontal number of kernel to extract
...@@ -1880,17 +1915,18 @@ public class EyesisCorrections { ...@@ -1880,17 +1915,18 @@ public class EyesisCorrections {
final int numFinished=tilesFinishedAtomic.getAndIncrement(); final int numFinished=tilesFinishedAtomic.getAndIncrement();
if (numFinished % (numberOfKernels/100+1) == 0) { if (numFinished % (numberOfKernels/100+1) == 0) {
SwingUtilities.invokeLater(new Runnable() { SwingUtilities.invokeLater(new Runnable() {
public void run() { @Override
public void run() {
IJ.showProgress(numFinished,numberOfKernels); IJ.showProgress(numFinished,numberOfKernels);
} }
}); });
} }
//numberOfKernels //numberOfKernels
} }
} }
}; };
} }
startAndJoin(threads); startAndJoin(threads);
} }
if (updateStatus) IJ.showStatus("Convolution DONE"); if (updateStatus) IJ.showStatus("Convolution DONE");
...@@ -1951,10 +1987,10 @@ public class EyesisCorrections { ...@@ -1951,10 +1987,10 @@ public class EyesisCorrections {
} }
return pixels; return pixels;
} }
// duplicates with DebayerScissors // duplicates with DebayerScissors
/* ======================================================================== */ /* ======================================================================== */
/**extract and multiply by window function (same size as kernel itself) */ /**extract and multiply by window function (same size as kernel itself) */
...@@ -2006,7 +2042,7 @@ public class EyesisCorrections { ...@@ -2006,7 +2042,7 @@ public class EyesisCorrections {
} }
} }
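The elided method above pairs tile extraction with a window of the kernel's own size; combined with extractOneKernel() further down, the idea reduces to copying one kernelSize x kernelSize tile out of the packed kernel image and weighting it. An illustrative sketch (not the method body):

static void extractWindowedKernel(
        float[] packed,       // all kernels packed side by side, numHor per row
        double[] kernel,      // output, length kernelSize*kernelSize
        double[] window,      // same length as kernel
        int numHor, int xTile, int yTile) {
    int kernelSize = (int) Math.sqrt(kernel.length);
    int rowLength = kernelSize * numHor;                  // width of the packed kernel image
    int base = (yTile * rowLength + xTile) * kernelSize;  // top-left pixel of the requested tile
    for (int i = 0; i < kernelSize; i++) {
        for (int j = 0; j < kernelSize; j++) {
            kernel[i * kernelSize + j] = packed[base + i * rowLength + j] * window[i * kernelSize + j];
        }
    }
}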
/* ======================================================================== */ /* ======================================================================== */
/* accumulate square tile to the pixel array (tile may extend beyond the array, will be cropped) */ /* accumulate square tile to the pixel array (tile may extend beyond the array, will be cropped) */
synchronized void accumulateSquareTile( synchronized void accumulateSquareTile(
...@@ -2056,8 +2092,8 @@ public class EyesisCorrections { ...@@ -2056,8 +2092,8 @@ public class EyesisCorrections {
} }
} }
} }
synchronized void accumulateSquareTile( synchronized void accumulateSquareTile(
double [] pixels, // float pixels array to accumulate tile double [] pixels, // float pixels array to accumulate tile
double [] tile, // data to accumulate to the pixels array double [] tile, // data to accumulate to the pixels array
...@@ -2082,7 +2118,7 @@ public class EyesisCorrections { ...@@ -2082,7 +2118,7 @@ public class EyesisCorrections {
} }
} }
// end of duplicates with DebayerScissors // end of duplicates with DebayerScissors
/* Convert source Bayer pattern (GR/BG) image to higher resolution, add margins by duplicating pattern around */ /* Convert source Bayer pattern (GR/BG) image to higher resolution, add margins by duplicating pattern around */
public ImageStack bayerToStack(ImagePlus imp, // source bayer image, linearized, 32-bit (float)) public ImageStack bayerToStack(ImagePlus imp, // source bayer image, linearized, 32-bit (float))
EyesisCorrectionParameters.SplitParameters splitParameters){ EyesisCorrectionParameters.SplitParameters splitParameters){
...@@ -2134,10 +2170,10 @@ public class EyesisCorrections { ...@@ -2134,10 +2170,10 @@ public class EyesisCorrections {
if (imp==null) return null; if (imp==null) return null;
boolean adv = splitParameters != null; boolean adv = splitParameters != null;
int oversample = adv? splitParameters.oversample : 1; int oversample = adv? splitParameters.oversample : 1;
int addTop= adv?splitParameters.addTop: 0; int addTop= adv?splitParameters.addTop: 0;
int addLeft= adv?splitParameters.addLeft: 0; int addLeft= adv?splitParameters.addLeft: 0;
int addBottom= adv?splitParameters.addBottom: 0; int addBottom= adv?splitParameters.addBottom: 0;
int addRight= adv?splitParameters.addRight: 0; int addRight= adv?splitParameters.addRight: 0;
String [] chnNames={"Red","Blue","Green"}; //Different sequence than RGB!! String [] chnNames={"Red","Blue","Green"}; //Different sequence than RGB!!
int nChn=chnNames.length; int nChn=chnNames.length;
ImageProcessor ip=imp.getProcessor(); ImageProcessor ip=imp.getProcessor();
...@@ -2171,14 +2207,14 @@ public class EyesisCorrections { ...@@ -2171,14 +2207,14 @@ public class EyesisCorrections {
} }
return outPixels; return outPixels;
} }
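For the GR/BG mosaic named above, even rows alternate G,R and odd rows alternate B,G, so a pixel's color follows from its coordinate parity (note the slice order used here is Red, Blue, Green, not RGB):

static String bayerColorGRBG(int x, int y) {
    if ((y & 1) == 0) {
        return ((x & 1) == 0) ? "Green" : "Red";   // even rows: G R G R ...
    } else {
        return ((x & 1) == 0) ? "Blue" : "Green";  // odd rows:  B G B G ...
    }
}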
//double [] DENOISE_MASK=null; //double [] DENOISE_MASK=null;
// TODO: do similar for JP4, using "subcamera" to "use" all channels for it // TODO: do similar for JP4, using "subcamera" to "use" all channels for it
/* ======================================================================== */ /* ======================================================================== */
/* Calculate deconvolution kernel (or difference of the two) noise gain /* Calculate deconvolution kernel (or difference of the two) noise gain
* to be used when calculating mask that selects between deconvolved with * to be used when calculating mask that selects between deconvolved with
* different kernels * different kernels
...@@ -2188,7 +2224,7 @@ public class EyesisCorrections { ...@@ -2188,7 +2224,7 @@ public class EyesisCorrections {
final ImageStack kernelStack2, // second stack with 3 colors/slices convolution kernels (or null) final ImageStack kernelStack2, // second stack with 3 colors/slices convolution kernels (or null)
final int size, // 128 - fft size, kernel size should be size/2 final int size, // 128 - fft size, kernel size should be size/2
final double blurSigma, final double blurSigma,
final int threadsMax, // maximal number of threads to launch final int threadsMax, // maximal number of threads to launch
final boolean updateStatus, final boolean updateStatus,
final int globalDebugLevel) // update status info final int globalDebugLevel) // update status info
{ {
...@@ -2210,7 +2246,8 @@ public class EyesisCorrections { ...@@ -2210,7 +2246,8 @@ public class EyesisCorrections {
final long startTime = System.nanoTime(); final long startTime = System.nanoTime();
for (int ithread = 0; ithread < threads.length; ithread++) { for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() { threads[ithread] = new Thread() {
public void run() { @Override
public void run() {
DoubleGaussianBlur gb=null; DoubleGaussianBlur gb=null;
if (blurSigma>0) gb=new DoubleGaussianBlur(); if (blurSigma>0) gb=new DoubleGaussianBlur();
float [] kernelPixels1= null; // will be initialized at first use float [] kernelPixels1= null; // will be initialized at first use
...@@ -2229,20 +2266,20 @@ public class EyesisCorrections { ...@@ -2229,20 +2266,20 @@ public class EyesisCorrections {
if (updateStatus) IJ.showStatus("Processing kernels, channel "+(chn+1)+" of "+nChn+", row "+(tileY+1)+" of "+kernelNumVert); if (updateStatus) IJ.showStatus("Processing kernels, channel "+(chn+1)+" of "+nChn+", row "+(tileY+1)+" of "+kernelNumVert);
if (globalDebugLevel>2) System.out.println("Processing kernels, channel "+(chn+1)+" of "+nChn+", row "+(tileY+1)+" of "+kernelNumVert+" : "+IJ.d2s(0.000000001*(System.nanoTime()-startTime),3)); if (globalDebugLevel>2) System.out.println("Processing kernels, channel "+(chn+1)+" of "+nChn+", row "+(tileY+1)+" of "+kernelNumVert+" : "+IJ.d2s(0.000000001*(System.nanoTime()-startTime),3));
} }
if (chn!=chn0) { if (chn!=chn0) {
kernelPixels1=(float[]) kernelStack1.getPixels(chn+1); kernelPixels1=(float[]) kernelStack1.getPixels(chn+1);
if (useDiff) kernelPixels2=(float[]) kernelStack2.getPixels(chn+1); if (useDiff) kernelPixels2=(float[]) kernelStack2.getPixels(chn+1);
chn0=chn; chn0=chn;
} }
/* read convolution kernel */ /* read convolution kernel */
extractOneKernel(kernelPixels1, // array of combined square kernels, each extractOneKernel(kernelPixels1, // array of combined square kernels, each
kernel1, // will be filled, should have correct size before call kernel1, // will be filled, should have correct size before call
kernelNumHor, // number of kernels in a row kernelNumHor, // number of kernels in a row
tileX, // horizontal number of kernel to extract tileX, // horizontal number of kernel to extract
tileY); // vertical number of kernel to extract tileY); // vertical number of kernel to extract
/* optionally read the second convolution kernel */ /* optionally read the second convolution kernel */
if (useDiff) {extractOneKernel(kernelPixels2, // array of combined square kernels, each if (useDiff) {extractOneKernel(kernelPixels2, // array of combined square kernels, each
kernel2, // will be filled, should have correct size before call kernel2, // will be filled, should have correct size before call
kernelNumHor, // number of kernels in a row kernelNumHor, // number of kernels in a row
tileX, // horizontal number of kernel to extract tileX, // horizontal number of kernel to extract
...@@ -2258,7 +2295,7 @@ public class EyesisCorrections { ...@@ -2258,7 +2295,7 @@ public class EyesisCorrections {
} }
} }
}; };
} }
startAndJoin(threads); startAndJoin(threads);
if (globalDebugLevel > 1) System.out.println("Threads done at "+IJ.d2s(0.000000001*(System.nanoTime()-startTime),3)); if (globalDebugLevel > 1) System.out.println("Threads done at "+IJ.d2s(0.000000001*(System.nanoTime()-startTime),3));
/* prepare result stack to return */ /* prepare result stack to return */
...@@ -2268,8 +2305,8 @@ public class EyesisCorrections { ...@@ -2268,8 +2305,8 @@ public class EyesisCorrections {
} }
return outStack; return outStack;
} }
void extractOneKernel(float [] pixels, // array of combined square kernels, each void extractOneKernel(float [] pixels, // array of combined square kernels, each
double [] kernel, // will be filled, should have correct size before call double [] kernel, // will be filled, should have correct size before call
int numHor, // number of kernels in a row int numHor, // number of kernels in a row
int xTile, // horizontal number of kernel to extract int xTile, // horizontal number of kernel to extract
...@@ -2290,7 +2327,7 @@ public class EyesisCorrections { ...@@ -2290,7 +2327,7 @@ public class EyesisCorrections {
} }
/* Extract noise mask (proportional to noise gain of the kernels), the denoise mask should be divided by this /* Extract noise mask (proportional to noise gain of the kernels), the denoise mask should be divided by this
* *
*/ */
public double [][] extractNoiseMask( public double [][] extractNoiseMask(
ImagePlus imp,// contains 3-slice stack (r,b,g) ImagePlus imp,// contains 3-slice stack (r,b,g)
...@@ -2347,8 +2384,8 @@ public class EyesisCorrections { ...@@ -2347,8 +2384,8 @@ public class EyesisCorrections {
boolean save, boolean save,
boolean show){ boolean show){
saveAndShow(imp, correctionsParameters, save, show, -1); saveAndShow(imp, correctionsParameters, save, show, -1);
} }
void saveAndShow( void saveAndShow(
ImagePlus imp, ImagePlus imp,
EyesisCorrectionParameters.CorrectionParameters correctionsParameters, EyesisCorrectionParameters.CorrectionParameters correctionsParameters,
...@@ -2359,7 +2396,7 @@ public class EyesisCorrections { ...@@ -2359,7 +2396,7 @@ public class EyesisCorrections {
if (save) path= correctionsParameters.selectResultsDirectory( if (save) path= correctionsParameters.selectResultsDirectory(
true, // smart, true, // smart,
true); //newAllowed, // save true); //newAllowed, // save
saveAndShow( saveAndShow(
imp, imp,
path, path,
...@@ -2382,8 +2419,8 @@ public class EyesisCorrections { ...@@ -2382,8 +2419,8 @@ public class EyesisCorrections {
jpegQuality,// <0 - keep current, 0 - force Tiff, >0 use for JPEG jpegQuality,// <0 - keep current, 0 - force Tiff, >0 use for JPEG
this.debugLevel); this.debugLevel);
} }
void saveAndShow( void saveAndShow(
ImagePlus imp, ImagePlus imp,
String path, String path,
...@@ -2403,7 +2440,7 @@ public class EyesisCorrections { ...@@ -2403,7 +2440,7 @@ public class EyesisCorrections {
} }
} }
} }
if (hasAlphaHighByte){ if (hasAlphaHighByte){
if (png){ if (png){
if (debugLevel > 0) System.out.println("Saving RGBA result to "+path+".png"); if (debugLevel > 0) System.out.println("Saving RGBA result to "+path+".png");
...@@ -2418,7 +2455,7 @@ public class EyesisCorrections { ...@@ -2418,7 +2455,7 @@ public class EyesisCorrections {
(new EyesisTiff()).saveTiffARGB32( (new EyesisTiff()).saveTiffARGB32(
imp, imp,
path+".tiff", path+".tiff",
false, // correctionsParameters.imageJTags, false, // correctionsParameters.imageJTags,
debugLevel); debugLevel);
} catch (IOException e) { } catch (IOException e) {
e.printStackTrace(); e.printStackTrace();
...@@ -2430,7 +2467,7 @@ public class EyesisCorrections { ...@@ -2430,7 +2467,7 @@ public class EyesisCorrections {
e.printStackTrace(); e.printStackTrace();
} }
} }
} else if (((imp.getStackSize()==1)) && (jpegQuality!=0) && ((imp.getFileInfo().fileType== FileInfo.RGB) || (jpegQuality>0))) { } else if (((imp.getStackSize()==1)) && (jpegQuality!=0) && ((imp.getFileInfo().fileType== FileInfo.RGB) || (jpegQuality>0))) {
if (debugLevel>0) System.out.println("Saving result to "+path+".jpeg"); if (debugLevel>0) System.out.println("Saving result to "+path+".jpeg");
FileSaver fs=new FileSaver(imp); FileSaver fs=new FileSaver(imp);
...@@ -2452,7 +2489,7 @@ public class EyesisCorrections { ...@@ -2452,7 +2489,7 @@ public class EyesisCorrections {
path+".tiff", path+".tiff",
mode, // mode, //
1.0, // full scale, absolute 1.0, // full scale, absolute
false, // correctionsParameters.imageJTags, false, // correctionsParameters.imageJTags,
debugLevel); debugLevel);
} catch (IOException e) { } catch (IOException e) {
e.printStackTrace(); e.printStackTrace();
...@@ -2472,7 +2509,7 @@ public class EyesisCorrections { ...@@ -2472,7 +2509,7 @@ public class EyesisCorrections {
imp.show(); imp.show();
} }
} }
private void saveAndShow( private void saveAndShow(
CompositeImage compositeImage, CompositeImage compositeImage,
EyesisCorrectionParameters.CorrectionParameters correctionsParameters, EyesisCorrectionParameters.CorrectionParameters correctionsParameters,
...@@ -2481,8 +2518,8 @@ public class EyesisCorrections { ...@@ -2481,8 +2518,8 @@ public class EyesisCorrections {
String path=null; String path=null;
if (save) path= correctionsParameters.selectResultsDirectory( if (save) path= correctionsParameters.selectResultsDirectory(
true, // smart, true, // smart,
true); //newAllowed, // save true); //newAllowed, // save
if (path!=null) { if (path!=null) {
path+=Prefs.getFileSeparator()+compositeImage.getTitle(); path+=Prefs.getFileSeparator()+compositeImage.getTitle();
if (debugLevel>0) System.out.println("Saving result to "+path+".tiff"); if (debugLevel>0) System.out.println("Saving result to "+path+".tiff");
...@@ -2496,7 +2533,7 @@ public class EyesisCorrections { ...@@ -2496,7 +2533,7 @@ public class EyesisCorrections {
} }
} }
/* ======================================================================== */ /* ======================================================================== */
/* Create a Thread[] array as large as the number of processors available. /* Create a Thread[] array as large as the number of processors available.
* From Stephan Preibisch's Multithreading.java class. See: * From Stephan Preibisch's Multithreading.java class. See:
...@@ -2520,7 +2557,7 @@ public class EyesisCorrections { ...@@ -2520,7 +2557,7 @@ public class EyesisCorrections {
} }
try try
{ {
for (int ithread = 0; ithread < threads.length; ++ithread) for (int ithread = 0; ithread < threads.length; ++ithread)
threads[ithread].join(); threads[ithread].join();
} catch (InterruptedException ie) } catch (InterruptedException ie)
...@@ -2528,6 +2565,6 @@ public class EyesisCorrections { ...@@ -2528,6 +2565,6 @@ public class EyesisCorrections {
throw new RuntimeException(ie); throw new RuntimeException(ie);
} }
} }
} }
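The multithreading helpers referenced by the Preibisch comment above follow the usual pattern; a minimal sketch (not necessarily byte-for-byte what this class contains):

    private Thread[] newThreadArray() {
        // one worker thread per available processor
        return new Thread[Runtime.getRuntime().availableProcessors()];
    }

    public static void startAndJoin(Thread[] threads) {
        for (int ithread = 0; ithread < threads.length; ++ithread) {
            threads[ithread].setPriority(Thread.NORM_PRIORITY);
            threads[ithread].start();
        }
        try {
            for (int ithread = 0; ithread < threads.length; ++ithread) {
                threads[ithread].join();
            }
        } catch (InterruptedException ie) {
            throw new RuntimeException(ie);
        }
    }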
This source diff could not be displayed because it is too large.
...@@ -154,6 +154,9 @@ public class PixelMapping { ...@@ -154,6 +154,9 @@ public class PixelMapping {
// as stored in "subcamera" field of the calibration file and "sensor_port". sensor_port may start from non-0, so we need to count all combinations // as stored in "subcamera" field of the calibration file and "sensor_port". sensor_port may start from non-0, so we need to count all combinations
//removeUnusedSensorData should be off! //removeUnusedSensorData should be off!
public int [] channelsForSubCamera(int subCamera){ public int [] channelsForSubCamera(int subCamera){
if (subCamera < 0) {
return null;
}
System.out.println("channelsForSubCamera("+subCamera+"),this.sensors.length="+this.sensors.length); System.out.println("channelsForSubCamera("+subCamera+"),this.sensors.length="+this.sensors.length);
// ArrayList<ArrayList<ArrayList<Integer>>> camera_IPs = new ArrayList<ArrayList<ArrayList<Integer>>>(); // ArrayList<ArrayList<ArrayList<Integer>>> camera_IPs = new ArrayList<ArrayList<ArrayList<Integer>>>();
ArrayList<Point> cam_port = new ArrayList<Point>(); ArrayList<Point> cam_port = new ArrayList<Point>();
...@@ -172,11 +175,11 @@ public class PixelMapping { ...@@ -172,11 +175,11 @@ public class PixelMapping {
} }
}); });
// debugging: // debugging:
System.out.println("----- This filename subcamera "+subCamera+": physical camera "+cam_port_arr[subCamera].x+", sensor_port "+cam_port_arr[subCamera].y);
if (subCamera >= cam_port_arr.length) { if (subCamera >= cam_port_arr.length) {
System.out.println("Error: Subcamera "+subCamera+" > that total namera of sensor ports in the system = "+cam_port_arr.length); System.out.println("Error: Subcamera "+subCamera+" > that total number of sensor ports in the system = "+cam_port_arr.length);
return null; return null;
} }
System.out.println("----- This filename subcamera "+subCamera+": physical camera "+cam_port_arr[subCamera].x+", sensor_port "+cam_port_arr[subCamera].y);
if (this.sensors == null) return null; if (this.sensors == null) return null;
int numChannels=0; int numChannels=0;
for (int i=0;i<this.sensors.length;i++) if (this.sensors[i]!=null) { for (int i=0;i<this.sensors.length;i++) if (this.sensors[i]!=null) {
......
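Note the ordering fix above: channelsForSubCamera() now rejects a negative index up front and checks subCamera against cam_port_arr.length before the debug print dereferences cam_port_arr[subCamera]. A hedged sketch of how a caller is expected to handle the null return (variable names illustrative):

    int subCamera = channels[0] - correctionsParameters.firstSubCamera; // may be negative, e.g. when firstSubCamera exceeds the file's channel number
    int [] subChannels = eyesisCorrections.pixelMapping.channelsForSubCamera(subCamera); // null for an out-of-range subcamera
    if (subChannels == null) {
        System.out.println("No sensor channels for subCamera " + subCamera + ", skipping this file");
    } else {
        channels = subChannels;
    }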
...@@ -44,9 +44,11 @@ import ij.process.ImageProcessor; ...@@ -44,9 +44,11 @@ import ij.process.ImageProcessor;
public class QuadCLT { public class QuadCLT {
static String [] fine_corr_coeff_names = {"A","B","C","D","E","F"}; static String [] fine_corr_coeff_names = {"A","B","C","D","E","F"};
static String [] fine_corr_dir_names = {"X","Y"}; static String [] fine_corr_dir_names = {"X","Y"};
static String prefix = "EYESIS_DCT."; // change later (first on save) public static String PREFIX = "EYESIS_DCT."; // change later (first on save)
public static String PREFIX_AUX = "EYESIS_DCT_AUX."; // change later (first on save)
static int QUAD = 4; // number of cameras static int QUAD = 4; // number of cameras
public Properties properties = null; public Properties properties = null;
// public String properties_prefix = "EYESIS_DCT.";
public EyesisCorrections eyesisCorrections = null; public EyesisCorrections eyesisCorrections = null;
public EyesisCorrectionParameters.CorrectionParameters correctionsParameters=null; public EyesisCorrectionParameters.CorrectionParameters correctionsParameters=null;
double [][][][][][] clt_kernels = null; double [][][][][][] clt_kernels = null;
...@@ -86,6 +88,7 @@ public class QuadCLT { ...@@ -86,6 +88,7 @@ public class QuadCLT {
} }
public QuadCLT( public QuadCLT(
String prefix,
Properties properties, Properties properties,
EyesisCorrections eyesisCorrections, EyesisCorrections eyesisCorrections,
EyesisCorrectionParameters.CorrectionParameters correctionsParameters EyesisCorrectionParameters.CorrectionParameters correctionsParameters
...@@ -93,11 +96,18 @@ public class QuadCLT { ...@@ -93,11 +96,18 @@ public class QuadCLT {
this.eyesisCorrections= eyesisCorrections; this.eyesisCorrections= eyesisCorrections;
this.correctionsParameters = correctionsParameters; this.correctionsParameters = correctionsParameters;
this.properties = properties; this.properties = properties;
getProperties(); // this.properties_prefix = prefix;
// System.out.println("new QuadCLT(), prefix = "+prefix);
getProperties(prefix);
} }
// TODO:Add saving just calibration // TODO:Add saving just calibration
public void setProperties(){ // save
// public void setProperties(){
// setProperties(this.properties_prefix);
// }
public void setProperties(String prefix){ // save
// System.out.println("setProperties("+prefix+")");
for (int n = 0; n < fine_corr.length; n++){ for (int n = 0; n < fine_corr.length; n++){
for (int d = 0; d < fine_corr[n].length; d++){ for (int d = 0; d < fine_corr[n].length; d++){
for (int i = 0; i < fine_corr[n][d].length; i++){ for (int i = 0; i < fine_corr[n][d].length; i++){
...@@ -113,9 +123,45 @@ public class QuadCLT { ...@@ -113,9 +123,45 @@ public class QuadCLT {
for (int i = 0; i < GeometryCorrection.CORR_NAMES.length; i++){ for (int i = 0; i < GeometryCorrection.CORR_NAMES.length; i++){
String name = prefix+"extrinsic_corr_"+GeometryCorrection.CORR_NAMES[i]; String name = prefix+"extrinsic_corr_"+GeometryCorrection.CORR_NAMES[i];
properties.setProperty(name, gc.getCorrVector().toArray()[i]+""); properties.setProperty(name, gc.getCorrVector().toArray()[i]+"");
// System.out.println("setProperties():"+i+": setProperty("+name+","+gc.getCorrVector().toArray()[i]+"");
}
}
public void copyPropertiesFrom(Properties other_properties, String other_prefix, String this_prefix){ // copy from another instance (e.g. main camera) and save under this_prefix
// System.out.println("copyPropertiesFrom(other_properties, "+other_prefix+", this_prefix"+")");
for (int n = 0; n < fine_corr.length; n++){
for (int d = 0; d < fine_corr[n].length; d++){
for (int i = 0; i < fine_corr[n][d].length; i++){
String other_name = other_prefix+"fine_corr_"+n+fine_corr_dir_names[d]+fine_corr_coeff_names[i];
String this_name = this_prefix+"fine_corr_"+n+fine_corr_dir_names[d]+fine_corr_coeff_names[i];
if (other_properties.getProperty(other_name)!=null) {
this.fine_corr[n][d][i]=Double.parseDouble(other_properties.getProperty(other_name));
properties.setProperty(this_name, this.fine_corr[n][d][i]+"");
}
}
}
}
GeometryCorrection gc = geometryCorrection;
if (gc == null) { // if it was not yet created
gc = new GeometryCorrection(this.extrinsic_corr);
}
for (int i = 0; i < GeometryCorrection.CORR_NAMES.length; i++){
String other_name = other_prefix+"extrinsic_corr_"+GeometryCorrection.CORR_NAMES[i];
if (other_properties.getProperty(other_name)!=null) {
this.extrinsic_corr[i] = Double.parseDouble(other_properties.getProperty(other_name));
if (geometryCorrection != null){
geometryCorrection.getCorrVector().toArray()[i] = this.extrinsic_corr[i];
}
}
String this_name = this_prefix+"extrinsic_corr_"+GeometryCorrection.CORR_NAMES[i];
properties.setProperty(this_name, gc.getCorrVector().toArray()[i]+"");
// System.out.println("copyPropertiesFrom():"+i+": setProperty("+this_name+","+gc.getCorrVector().toArray()[i]+"");
} }
// System.out.println("Done copyPropertiesFrom");
} }
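Together with the PREFIX / PREFIX_AUX constants and the prefix-aware constructor above, the intended flow is roughly the following (a hedged sketch; the *_aux variables are illustrative):

    // main and auxiliary cameras keep their calibration under separate property prefixes
    QuadCLT quadCLT_main = new QuadCLT(QuadCLT.PREFIX,     properties, eyesisCorrections,    correctionsParameters);
    QuadCLT quadCLT_aux  = new QuadCLT(QuadCLT.PREFIX_AUX, properties, eyesisCorrectionsAux, correctionsParametersAux);

    // seed the aux instance from the main one (e.g. when no AUX-prefixed values exist yet),
    // re-saving the copied fine_corr and extrinsic_corr values under the aux prefix
    quadCLT_aux.copyPropertiesFrom(properties, QuadCLT.PREFIX, QuadCLT.PREFIX_AUX);

    // persist both sets before the configuration file is written
    quadCLT_main.setProperties(QuadCLT.PREFIX);
    quadCLT_aux.setProperties(QuadCLT.PREFIX_AUX);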
public void listGeometryCorrection(boolean full){ public void listGeometryCorrection(boolean full){
GeometryCorrection gc = geometryCorrection; GeometryCorrection gc = geometryCorrection;
if (gc == null) { // if it was not yet created if (gc == null) { // if it was not yet created
...@@ -124,7 +170,8 @@ public class QuadCLT { ...@@ -124,7 +170,8 @@ public class QuadCLT {
gc.listGeometryCorrection(full); gc.listGeometryCorrection(full);
} }
public void getProperties(){ // restore public void getProperties(String prefix){ // restore
// System.out.println("getProperties("+prefix+")");
for (int n = 0; n < fine_corr.length; n++){ for (int n = 0; n < fine_corr.length; n++){
for (int d = 0; d < fine_corr[n].length; d++){ for (int d = 0; d < fine_corr[n].length; d++){
for (int i = 0; i < fine_corr[n][d].length; i++){ for (int i = 0; i < fine_corr[n][d].length; i++){
...@@ -138,6 +185,8 @@ public class QuadCLT { ...@@ -138,6 +185,8 @@ public class QuadCLT {
String name = prefix+"extrinsic_corr_"+GeometryCorrection.CORR_NAMES[i]; String name = prefix+"extrinsic_corr_"+GeometryCorrection.CORR_NAMES[i];
if (properties.getProperty(name)!=null) { if (properties.getProperty(name)!=null) {
this.extrinsic_corr[i] = Double.parseDouble(properties.getProperty(name)); this.extrinsic_corr[i] = Double.parseDouble(properties.getProperty(name));
// System.out.println("getProperties():"+i+": getProperty("+name+") -> "+properties.getProperty(name)+"");
if (geometryCorrection != null){ if (geometryCorrection != null){
geometryCorrection.getCorrVector().toArray()[i] = this.extrinsic_corr[i]; geometryCorrection.getCorrVector().toArray()[i] = this.extrinsic_corr[i];
} }
...@@ -1180,7 +1229,7 @@ public class QuadCLT { ...@@ -1180,7 +1229,7 @@ public class QuadCLT {
public void processCLTChannelImages( public void processCLTChannelImages(
EyesisCorrectionParameters.CLTParameters clt_parameters, EyesisCorrectionParameters.CLTParameters clt_parameters,
EyesisCorrectionParameters.DebayerParameters debayerParameters, EyesisCorrectionParameters.DebayerParameters debayerParameters,
EyesisCorrectionParameters.NonlinParameters nonlinParameters, // EyesisCorrectionParameters.NonlinParameters nonlinParameters,
EyesisCorrectionParameters.ColorProcParameters colorProcParameters, EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
CorrectionColorProc.ColorGainsParameters channelGainParameters, CorrectionColorProc.ColorGainsParameters channelGainParameters,
EyesisCorrectionParameters.RGBParameters rgbParameters, EyesisCorrectionParameters.RGBParameters rgbParameters,
...@@ -1282,7 +1331,7 @@ public class QuadCLT { ...@@ -1282,7 +1331,7 @@ public class QuadCLT {
imp_src, // should have properties "name"(base for saving results), "channel","path" imp_src, // should have properties "name"(base for saving results), "channel","path"
clt_parameters, clt_parameters,
debayerParameters, debayerParameters,
nonlinParameters, // nonlinParameters,
colorProcParameters, colorProcParameters,
channelGainParameters, channelGainParameters,
rgbParameters, rgbParameters,
...@@ -1314,7 +1363,7 @@ public class QuadCLT { ...@@ -1314,7 +1363,7 @@ public class QuadCLT {
// EyesisCorrectionParameters.DCTParameters dct_parameters, // EyesisCorrectionParameters.DCTParameters dct_parameters,
EyesisCorrectionParameters.CLTParameters clt_parameters, EyesisCorrectionParameters.CLTParameters clt_parameters,
EyesisCorrectionParameters.DebayerParameters debayerParameters, EyesisCorrectionParameters.DebayerParameters debayerParameters,
EyesisCorrectionParameters.NonlinParameters nonlinParameters, // EyesisCorrectionParameters.NonlinParameters nonlinParameters,
EyesisCorrectionParameters.ColorProcParameters colorProcParameters, EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
CorrectionColorProc.ColorGainsParameters channelGainParameters, CorrectionColorProc.ColorGainsParameters channelGainParameters,
EyesisCorrectionParameters.RGBParameters rgbParameters, EyesisCorrectionParameters.RGBParameters rgbParameters,
...@@ -1768,7 +1817,7 @@ public class QuadCLT { ...@@ -1768,7 +1817,7 @@ public class QuadCLT {
public void processCLTSets( public void processCLTSets(
EyesisCorrectionParameters.CLTParameters clt_parameters, EyesisCorrectionParameters.CLTParameters clt_parameters,
EyesisCorrectionParameters.DebayerParameters debayerParameters, EyesisCorrectionParameters.DebayerParameters debayerParameters,
EyesisCorrectionParameters.NonlinParameters nonlinParameters, // EyesisCorrectionParameters.NonlinParameters nonlinParameters,
EyesisCorrectionParameters.ColorProcParameters colorProcParameters, EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
CorrectionColorProc.ColorGainsParameters channelGainParameters, CorrectionColorProc.ColorGainsParameters channelGainParameters,
EyesisCorrectionParameters.RGBParameters rgbParameters, EyesisCorrectionParameters.RGBParameters rgbParameters,
...@@ -2008,7 +2057,7 @@ public class QuadCLT { ...@@ -2008,7 +2057,7 @@ public class QuadCLT {
imp_srcs[srcChannel], // should have properties "name"(base for saving results), "channel","path" imp_srcs[srcChannel], // should have properties "name"(base for saving results), "channel","path"
clt_parameters, clt_parameters,
debayerParameters, debayerParameters,
nonlinParameters, // nonlinParameters,
colorProcParameters, colorProcParameters,
channelGainParameters, channelGainParameters,
rgbParameters, rgbParameters,
...@@ -2042,7 +2091,7 @@ public class QuadCLT { ...@@ -2042,7 +2091,7 @@ public class QuadCLT {
ImagePlus imp_src, // should have properties "name"(base for saving results), "channel","path" ImagePlus imp_src, // should have properties "name"(base for saving results), "channel","path"
EyesisCorrectionParameters.CLTParameters clt_parameters, EyesisCorrectionParameters.CLTParameters clt_parameters,
EyesisCorrectionParameters.DebayerParameters debayerParameters, EyesisCorrectionParameters.DebayerParameters debayerParameters,
EyesisCorrectionParameters.NonlinParameters nonlinParameters, // EyesisCorrectionParameters.NonlinParameters nonlinParameters,
EyesisCorrectionParameters.ColorProcParameters colorProcParameters, EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
CorrectionColorProc.ColorGainsParameters channelGainParameters, CorrectionColorProc.ColorGainsParameters channelGainParameters,
EyesisCorrectionParameters.RGBParameters rgbParameters, EyesisCorrectionParameters.RGBParameters rgbParameters,
...@@ -2418,7 +2467,7 @@ public class QuadCLT { ...@@ -2418,7 +2467,7 @@ public class QuadCLT {
public void processCLTQuads( public void processCLTQuads(
EyesisCorrectionParameters.CLTParameters clt_parameters, EyesisCorrectionParameters.CLTParameters clt_parameters,
EyesisCorrectionParameters.DebayerParameters debayerParameters, EyesisCorrectionParameters.DebayerParameters debayerParameters,
EyesisCorrectionParameters.NonlinParameters nonlinParameters, // EyesisCorrectionParameters.NonlinParameters nonlinParameters,
EyesisCorrectionParameters.ColorProcParameters colorProcParameters, EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
CorrectionColorProc.ColorGainsParameters channelGainParameters, CorrectionColorProc.ColorGainsParameters channelGainParameters,
EyesisCorrectionParameters.RGBParameters rgbParameters, EyesisCorrectionParameters.RGBParameters rgbParameters,
...@@ -2655,7 +2704,7 @@ public class QuadCLT { ...@@ -2655,7 +2704,7 @@ public class QuadCLT {
imp_srcs, // [srcChannel], // should have properties "name"(base for saving results), "channel","path" imp_srcs, // [srcChannel], // should have properties "name"(base for saving results), "channel","path"
clt_parameters, clt_parameters,
debayerParameters, debayerParameters,
nonlinParameters, // nonlinParameters,
colorProcParameters, colorProcParameters,
channelGainParameters, channelGainParameters,
rgbParameters, rgbParameters,
...@@ -2680,7 +2729,7 @@ public class QuadCLT { ...@@ -2680,7 +2729,7 @@ public class QuadCLT {
ImagePlus [] imp_quad, // should have properties "name"(base for saving results), "channel","path" ImagePlus [] imp_quad, // should have properties "name"(base for saving results), "channel","path"
EyesisCorrectionParameters.CLTParameters clt_parameters, EyesisCorrectionParameters.CLTParameters clt_parameters,
EyesisCorrectionParameters.DebayerParameters debayerParameters, EyesisCorrectionParameters.DebayerParameters debayerParameters,
EyesisCorrectionParameters.NonlinParameters nonlinParameters, // EyesisCorrectionParameters.NonlinParameters nonlinParameters,
EyesisCorrectionParameters.ColorProcParameters colorProcParameters, EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
CorrectionColorProc.ColorGainsParameters channelGainParameters, CorrectionColorProc.ColorGainsParameters channelGainParameters,
EyesisCorrectionParameters.RGBParameters rgbParameters, EyesisCorrectionParameters.RGBParameters rgbParameters,
...@@ -2983,11 +3032,11 @@ public class QuadCLT { ...@@ -2983,11 +3032,11 @@ public class QuadCLT {
public void processCLTQuadCorrs( public void processCLTQuadCorrs(
EyesisCorrectionParameters.CLTParameters clt_parameters, EyesisCorrectionParameters.CLTParameters clt_parameters,
EyesisCorrectionParameters.DebayerParameters debayerParameters, EyesisCorrectionParameters.DebayerParameters debayerParameters,
EyesisCorrectionParameters.NonlinParameters nonlinParameters, // EyesisCorrectionParameters.NonlinParameters nonlinParameters,
EyesisCorrectionParameters.ColorProcParameters colorProcParameters, EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
CorrectionColorProc.ColorGainsParameters channelGainParameters, CorrectionColorProc.ColorGainsParameters channelGainParameters,
EyesisCorrectionParameters.RGBParameters rgbParameters, EyesisCorrectionParameters.RGBParameters rgbParameters,
EyesisCorrectionParameters.EquirectangularParameters equirectangularParameters, // EyesisCorrectionParameters.EquirectangularParameters equirectangularParameters,
int convolveFFTSize, // 128 - fft size, kernel size should be size/2 int convolveFFTSize, // 128 - fft size, kernel size should be size/2
final boolean apply_corr, // calculate and apply additional fine geometry correction final boolean apply_corr, // calculate and apply additional fine geometry correction
final boolean infinity_corr, // calculate and apply geometry correction at infinity final boolean infinity_corr, // calculate and apply geometry correction at infinity
...@@ -3015,7 +3064,7 @@ public class QuadCLT { ...@@ -3015,7 +3064,7 @@ public class QuadCLT {
if (correctionsParameters.isJP4()){ if (correctionsParameters.isJP4()){
int subCamera= channels[0]- correctionsParameters.firstSubCamera; // to match those in the sensor files int subCamera= channels[0]- correctionsParameters.firstSubCamera; // to match those in the sensor files
// removeUnusedSensorData should be off!? // removeUnusedSensorData should be off!?
channels=this.eyesisCorrections.pixelMapping.channelsForSubCamera(subCamera); channels=this.eyesisCorrections.pixelMapping.channelsForSubCamera(subCamera); // limit here or disable Error
} }
if (channels!=null){ if (channels!=null){
for (int i=0;i<channels.length;i++) if (eyesisCorrections.isChannelEnabled(channels[i])){ for (int i=0;i<channels.length;i++) if (eyesisCorrections.isChannelEnabled(channels[i])){
...@@ -3035,7 +3084,7 @@ public class QuadCLT { ...@@ -3035,7 +3084,7 @@ public class QuadCLT {
double [] referenceExposures=eyesisCorrections.calcReferenceExposures(debugLevel); // multiply each image by this and divide by individual (if not NaN) double [] referenceExposures=eyesisCorrections.calcReferenceExposures(debugLevel); // multiply each image by this and divide by individual (if not NaN)
int [][] fileIndices=new int [numImagesToProcess][2]; // file index, channel number int [][] fileIndices=new int [numImagesToProcess][2]; // file index, channel number
int index=0; int index=0;
for (int nFile=0;nFile<enabledFiles.length;nFile++){ for (int nFile=0;nFile<enabledFiles.length;nFile++){ // enabledFiles not used anymore?
if ((sourceFiles[nFile]!=null) && (sourceFiles[nFile].length()>1)) { if ((sourceFiles[nFile]!=null) && (sourceFiles[nFile].length()>1)) {
int [] channels={correctionsParameters.getChannelFromSourceTiff(sourceFiles[nFile])}; int [] channels={correctionsParameters.getChannelFromSourceTiff(sourceFiles[nFile])};
if (correctionsParameters.isJP4()){ if (correctionsParameters.isJP4()){
...@@ -3060,18 +3109,23 @@ public class QuadCLT { ...@@ -3060,18 +3109,23 @@ public class QuadCLT {
setNames.add(setName); setNames.add(setName);
setFiles.add(new ArrayList<Integer>()); setFiles.add(new ArrayList<Integer>());
} }
setFiles.get(setNames.indexOf(setName)).add(new Integer(nFile));
//FIXME - similar in other places, extract common code
// setFiles.get(setNames.indexOf(setName)).add(new Integer(nFile));
setFiles.get(setNames.indexOf(setName)).add(new Integer(iImage));
} }
for (int nSet = 0; nSet < setNames.size(); nSet++){ for (int nSet = 0; nSet < setNames.size(); nSet++){
int maxChn = 0; int maxChn = 0;
for (int i = 0; i < setFiles.get(nSet).size(); i++){ for (int i = 0; i < setFiles.get(nSet).size(); i++){
int chn = fileIndices[setFiles.get(nSet).get(i)][1]; int chn = fileIndices[setFiles.get(nSet).get(i)][1]; // wrong,
if (chn > maxChn) maxChn = chn; if (chn > maxChn) maxChn = chn;
} }
int [] channelFiles = new int[maxChn+1]; int [] channelFiles = new int[maxChn+1];
for (int i =0; i < channelFiles.length; i++) channelFiles[i] = -1; for (int i =0; i < channelFiles.length; i++) channelFiles[i] = -1;
for (int i = 0; i < setFiles.get(nSet).size(); i++){ for (int i = 0; i < setFiles.get(nSet).size(); i++){
channelFiles[fileIndices[setFiles.get(nSet).get(i)][1]] = setFiles.get(nSet).get(i); // channelFiles[fileIndices[setFiles.get(nSet).get(i)][1]] = setFiles.get(nSet).get(i);
channelFiles[fileIndices[setFiles.get(nSet).get(i)][1]] = fileIndices[setFiles.get(nSet).get(i)][0];
} }
ImagePlus [] imp_srcs = new ImagePlus[channelFiles.length]; ImagePlus [] imp_srcs = new ImagePlus[channelFiles.length];
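The two index fixes above change what setFiles holds: image indices (iImage) into fileIndices rather than raw file numbers (nFile), so the file number is now recovered through fileIndices[...][0]. The resulting data flow, spelled out:

    // fileIndices[iImage] = { nFile, channel }   -- one row per enabled image
    // setFiles.get(nSet)  holds the iImage values of the images belonging to one set
    for (int i = 0; i < setFiles.get(nSet).size(); i++) {
        int iImage  = setFiles.get(nSet).get(i);
        int channel = fileIndices[iImage][1];
        int nFile   = fileIndices[iImage][0];
        channelFiles[channel] = nFile;   // file number for this channel
    }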
...@@ -3311,7 +3365,7 @@ public class QuadCLT { ...@@ -3311,7 +3365,7 @@ public class QuadCLT {
saturation_imp, // boolean [][] saturation_imp, // (near) saturated pixels or null saturation_imp, // boolean [][] saturation_imp, // (near) saturated pixels or null
clt_parameters, clt_parameters,
debayerParameters, debayerParameters,
nonlinParameters, // nonlinParameters,
colorProcParameters, colorProcParameters,
channelGainParameters, channelGainParameters,
rgbParameters, rgbParameters,
...@@ -3557,7 +3611,7 @@ public class QuadCLT { ...@@ -3557,7 +3611,7 @@ public class QuadCLT {
boolean [][] saturation_imp, // (near) saturated pixels or null boolean [][] saturation_imp, // (near) saturated pixels or null
EyesisCorrectionParameters.CLTParameters clt_parameters, EyesisCorrectionParameters.CLTParameters clt_parameters,
EyesisCorrectionParameters.DebayerParameters debayerParameters, EyesisCorrectionParameters.DebayerParameters debayerParameters,
EyesisCorrectionParameters.NonlinParameters nonlinParameters, // EyesisCorrectionParameters.NonlinParameters nonlinParameters,
EyesisCorrectionParameters.ColorProcParameters colorProcParameters, EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
CorrectionColorProc.ColorGainsParameters channelGainParameters, CorrectionColorProc.ColorGainsParameters channelGainParameters,
EyesisCorrectionParameters.RGBParameters rgbParameters, EyesisCorrectionParameters.RGBParameters rgbParameters,
...@@ -4377,7 +4431,11 @@ public class QuadCLT { ...@@ -4377,7 +4431,11 @@ public class QuadCLT {
} }
public void show_fine_corr() public void show_fine_corr()
{ {
show_fine_corr( this.fine_corr, ""); show_fine_corr("");
}
public void show_fine_corr(String prefix)
{
show_fine_corr( this.fine_corr, prefix);
} }
public void show_fine_corr( public void show_fine_corr(
...@@ -4396,7 +4454,8 @@ public class QuadCLT { ...@@ -4396,7 +4454,8 @@ public class QuadCLT {
} }
} }
System.out.println(); System.out.println();
showExtrinsicCorr(); String name = (sadd.length() == 0)?"":("("+sadd+")");
showExtrinsicCorr(name);
} }
...@@ -4405,9 +4464,9 @@ public class QuadCLT { ...@@ -4405,9 +4464,9 @@ public class QuadCLT {
this.fine_corr = new double [4][2][6]; // reset all coefficients to 0 this.fine_corr = new double [4][2][6]; // reset all coefficients to 0
} }
public void showExtrinsicCorr() public void showExtrinsicCorr(String name)
{ {
System.out.println("Extrinsic corrections"); System.out.println("Extrinsic corrections "+name);
if (geometryCorrection == null){ if (geometryCorrection == null){
System.out.println("are not set, will be:"); System.out.println("are not set, will be:");
System.out.println(new GeometryCorrection(this.extrinsic_corr).getCorrVector().toString()); System.out.println(new GeometryCorrection(this.extrinsic_corr).getCorrVector().toString());
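With the label threaded through show_fine_corr() into showExtrinsicCorr(), the per-camera reports become distinguishable; illustrative call sites:

    quadCLT_main.show_fine_corr("main"); // header reads "Extrinsic corrections (main)"
    quadCLT_aux.show_fine_corr("aux");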
...@@ -4430,7 +4489,7 @@ public class QuadCLT { ...@@ -4430,7 +4489,7 @@ public class QuadCLT {
public void cltDisparityScans( public void cltDisparityScans(
EyesisCorrectionParameters.CLTParameters clt_parameters, EyesisCorrectionParameters.CLTParameters clt_parameters,
EyesisCorrectionParameters.DebayerParameters debayerParameters, EyesisCorrectionParameters.DebayerParameters debayerParameters,
EyesisCorrectionParameters.NonlinParameters nonlinParameters, // EyesisCorrectionParameters.NonlinParameters nonlinParameters,
EyesisCorrectionParameters.ColorProcParameters colorProcParameters, EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
CorrectionColorProc.ColorGainsParameters channelGainParameters, CorrectionColorProc.ColorGainsParameters channelGainParameters,
EyesisCorrectionParameters.RGBParameters rgbParameters, EyesisCorrectionParameters.RGBParameters rgbParameters,
...@@ -4678,7 +4737,7 @@ public class QuadCLT { ...@@ -4678,7 +4737,7 @@ public class QuadCLT {
saturation_imp, // boolean [][] saturation_imp, // (near) saturated pixels or null saturation_imp, // boolean [][] saturation_imp, // (near) saturated pixels or null
clt_parameters, clt_parameters,
debayerParameters, debayerParameters,
nonlinParameters, // nonlinParameters,
colorProcParameters, colorProcParameters,
channelGainParameters, channelGainParameters,
rgbParameters, rgbParameters,
...@@ -4705,7 +4764,7 @@ public class QuadCLT { ...@@ -4705,7 +4764,7 @@ public class QuadCLT {
boolean [][] saturation_imp, // (near) saturated pixels or null boolean [][] saturation_imp, // (near) saturated pixels or null
EyesisCorrectionParameters.CLTParameters clt_parameters, EyesisCorrectionParameters.CLTParameters clt_parameters,
EyesisCorrectionParameters.DebayerParameters debayerParameters, EyesisCorrectionParameters.DebayerParameters debayerParameters,
EyesisCorrectionParameters.NonlinParameters nonlinParameters, // EyesisCorrectionParameters.NonlinParameters nonlinParameters,
EyesisCorrectionParameters.ColorProcParameters colorProcParameters, EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
CorrectionColorProc.ColorGainsParameters channelGainParameters, CorrectionColorProc.ColorGainsParameters channelGainParameters,
EyesisCorrectionParameters.RGBParameters rgbParameters, EyesisCorrectionParameters.RGBParameters rgbParameters,
...@@ -5319,7 +5378,7 @@ public class QuadCLT { ...@@ -5319,7 +5378,7 @@ public class QuadCLT {
boolean adjust_poly, boolean adjust_poly,
EyesisCorrectionParameters.CLTParameters clt_parameters, EyesisCorrectionParameters.CLTParameters clt_parameters,
EyesisCorrectionParameters.DebayerParameters debayerParameters, EyesisCorrectionParameters.DebayerParameters debayerParameters,
EyesisCorrectionParameters.NonlinParameters nonlinParameters, // EyesisCorrectionParameters.NonlinParameters nonlinParameters,
EyesisCorrectionParameters.ColorProcParameters colorProcParameters, EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
CorrectionColorProc.ColorGainsParameters channelGainParameters, CorrectionColorProc.ColorGainsParameters channelGainParameters,
EyesisCorrectionParameters.RGBParameters rgbParameters, EyesisCorrectionParameters.RGBParameters rgbParameters,
...@@ -5576,9 +5635,9 @@ public class QuadCLT { ...@@ -5576,9 +5635,9 @@ public class QuadCLT {
saturation_imp, // boolean [][] saturation_imp, // (near) saturated pixels or null saturation_imp, // boolean [][] saturation_imp, // (near) saturated pixels or null
clt_parameters, clt_parameters,
debayerParameters, debayerParameters,
nonlinParameters, // nonlinParameters,
colorProcParameters, colorProcParameters,
channelGainParameters, // channelGainParameters,
rgbParameters, rgbParameters,
threadsMax, // maximal number of threads to launch threadsMax, // maximal number of threads to launch
updateStatus, updateStatus,
...@@ -5602,7 +5661,7 @@ public class QuadCLT { ...@@ -5602,7 +5661,7 @@ public class QuadCLT {
imp_srcs, // [srcChannel], // should have properties "name"(base for saving results), "channel","path" imp_srcs, // [srcChannel], // should have properties "name"(base for saving results), "channel","path"
clt_parameters, clt_parameters,
debayerParameters, debayerParameters,
nonlinParameters, // nonlinParameters,
colorProcParameters, colorProcParameters,
channelGainParameters, channelGainParameters,
rgbParameters, rgbParameters,
...@@ -5629,9 +5688,9 @@ public class QuadCLT { ...@@ -5629,9 +5688,9 @@ public class QuadCLT {
boolean [][] saturation_imp, // (near) saturated pixels or null boolean [][] saturation_imp, // (near) saturated pixels or null
EyesisCorrectionParameters.CLTParameters clt_parameters, EyesisCorrectionParameters.CLTParameters clt_parameters,
EyesisCorrectionParameters.DebayerParameters debayerParameters, EyesisCorrectionParameters.DebayerParameters debayerParameters,
EyesisCorrectionParameters.NonlinParameters nonlinParameters, // EyesisCorrectionParameters.NonlinParameters nonlinParameters,
EyesisCorrectionParameters.ColorProcParameters colorProcParameters, EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
CorrectionColorProc.ColorGainsParameters channelGainParameters, // CorrectionColorProc.ColorGainsParameters channelGainParameters,
EyesisCorrectionParameters.RGBParameters rgbParameters, EyesisCorrectionParameters.RGBParameters rgbParameters,
final int threadsMax, // maximal number of threads to launch final int threadsMax, // maximal number of threads to launch
final boolean updateStatus, final boolean updateStatus,
...@@ -6381,7 +6440,7 @@ public class QuadCLT { ...@@ -6381,7 +6440,7 @@ public class QuadCLT {
ImagePlus [] imp_quad, // should have properties "name"(base for saving results), "channel","path" ImagePlus [] imp_quad, // should have properties "name"(base for saving results), "channel","path"
EyesisCorrectionParameters.CLTParameters clt_parameters, EyesisCorrectionParameters.CLTParameters clt_parameters,
EyesisCorrectionParameters.DebayerParameters debayerParameters, EyesisCorrectionParameters.DebayerParameters debayerParameters,
EyesisCorrectionParameters.NonlinParameters nonlinParameters, // EyesisCorrectionParameters.NonlinParameters nonlinParameters,
EyesisCorrectionParameters.ColorProcParameters colorProcParameters, EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
CorrectionColorProc.ColorGainsParameters channelGainParameters, CorrectionColorProc.ColorGainsParameters channelGainParameters,
EyesisCorrectionParameters.RGBParameters rgbParameters, EyesisCorrectionParameters.RGBParameters rgbParameters,
...@@ -8422,7 +8481,7 @@ public class QuadCLT { ...@@ -8422,7 +8481,7 @@ public class QuadCLT {
public void batchCLT3d( public void batchCLT3d(
EyesisCorrectionParameters.CLTParameters clt_parameters, EyesisCorrectionParameters.CLTParameters clt_parameters,
EyesisCorrectionParameters.DebayerParameters debayerParameters, EyesisCorrectionParameters.DebayerParameters debayerParameters,
EyesisCorrectionParameters.NonlinParameters nonlinParameters, // EyesisCorrectionParameters.NonlinParameters nonlinParameters,
EyesisCorrectionParameters.ColorProcParameters colorProcParameters, EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
CorrectionColorProc.ColorGainsParameters channelGainParameters, CorrectionColorProc.ColorGainsParameters channelGainParameters,
EyesisCorrectionParameters.RGBParameters rgbParameters, EyesisCorrectionParameters.RGBParameters rgbParameters,
...@@ -8585,9 +8644,9 @@ public class QuadCLT { ...@@ -8585,9 +8644,9 @@ public class QuadCLT {
saturation_imp, // boolean [][] saturation_imp, // (near) saturated pixels or null saturation_imp, // boolean [][] saturation_imp, // (near) saturated pixels or null
clt_parameters, clt_parameters,
debayerParameters, debayerParameters,
nonlinParameters, // nonlinParameters,
colorProcParameters, colorProcParameters,
channelGainParameters, // channelGainParameters,
rgbParameters, rgbParameters,
threadsMax, // maximal number of threads to launch threadsMax, // maximal number of threads to launch
updateStatus, updateStatus,
...@@ -8609,9 +8668,9 @@ public class QuadCLT { ...@@ -8609,9 +8668,9 @@ public class QuadCLT {
saturation_imp, // boolean [][] saturation_imp, // (near) saturated pixels or null saturation_imp, // boolean [][] saturation_imp, // (near) saturated pixels or null
clt_parameters, clt_parameters,
debayerParameters, debayerParameters,
nonlinParameters, // nonlinParameters,
colorProcParameters, colorProcParameters,
channelGainParameters, // channelGainParameters,
rgbParameters, rgbParameters,
threadsMax, // maximal number of threads to launch threadsMax, // maximal number of threads to launch
updateStatus, updateStatus,
...@@ -8633,7 +8692,7 @@ public class QuadCLT { ...@@ -8633,7 +8692,7 @@ public class QuadCLT {
saturation_imp, // boolean [][] saturation_imp, // (near) saturated pixels or null saturation_imp, // boolean [][] saturation_imp, // (near) saturated pixels or null
clt_parameters, clt_parameters,
debayerParameters, debayerParameters,
nonlinParameters, // nonlinParameters,
colorProcParameters, colorProcParameters,
channelGainParameters, channelGainParameters,
rgbParameters, rgbParameters,
...@@ -8652,9 +8711,9 @@ public class QuadCLT { ...@@ -8652,9 +8711,9 @@ public class QuadCLT {
saturation_imp, // boolean [][] saturation_imp, // (near) saturated pixels or null saturation_imp, // boolean [][] saturation_imp, // (near) saturated pixels or null
clt_parameters, clt_parameters,
debayerParameters, debayerParameters,
nonlinParameters, // nonlinParameters,
colorProcParameters, colorProcParameters,
channelGainParameters, // channelGainParameters,
rgbParameters, rgbParameters,
threadsMax, // maximal number of threads to launch threadsMax, // maximal number of threads to launch
updateStatus, updateStatus,
...@@ -8665,7 +8724,7 @@ public class QuadCLT { ...@@ -8665,7 +8724,7 @@ public class QuadCLT {
imp_srcs, // [srcChannel], // should have properties "name"(base for saving results), "channel","path" imp_srcs, // [srcChannel], // should have properties "name"(base for saving results), "channel","path"
clt_parameters, clt_parameters,
debayerParameters, debayerParameters,
nonlinParameters, // nonlinParameters,
colorProcParameters, colorProcParameters,
channelGainParameters, channelGainParameters,
rgbParameters, rgbParameters,
......