Commit 3ca96c86 authored by Andrey Filippov

Converted conditioning to multithreaded, started noise comparison

parent 5b919b27
......@@ -41,6 +41,7 @@ import com.elphel.imagej.common.ShowDoubleFloatArrays;
import com.elphel.imagej.jp4.JP46_Reader_camera;
import com.elphel.imagej.readers.EyesisTiff;
import com.elphel.imagej.readers.ImagejJp4Tiff;
import com.elphel.imagej.tileprocessor.ImageDtt;
import ij.CompositeImage;
import ij.IJ;
......@@ -59,7 +60,7 @@ import loci.formats.FormatException;
public class EyesisCorrections {
public JP46_Reader_camera JP4_INSTANCE= new JP46_Reader_camera(false);
public ImagejJp4Tiff imagejJp4Tiff = new ImagejJp4Tiff();
// public ImagejJp4Tiff imagejJp4Tiff = new ImagejJp4Tiff();
ShowDoubleFloatArrays SDFA_INSTANCE= new ShowDoubleFloatArrays();
DebayerScissorsClass debayerScissors=null;
......@@ -253,7 +254,7 @@ public class EyesisCorrections {
int src_channel = correctionsParameters.getChannelFromSourceTiff(path);
int sub_camera = src_channel - correctionsParameters.firstSubCamera;
int subchannel= pixelMapping.getSubChannelSilent(sub_camera); // only used for demux
ImagejJp4Tiff imagejJp4Tiff = new ImagejJp4Tiff(); // override global
ImagePlus imp = null;
try {
imp = imagejJp4Tiff.readTiffJp4(
......@@ -602,8 +603,8 @@ public class EyesisCorrections {
}
public void createChannelVignetting(
boolean correct_vignetting){
final boolean correct_vignetting){
final int threadsMax = 100;
/// this.channelWidthHeight=new int [this.usedChannels.length][];
this.channelVignettingCorrection=new float [this.usedChannels.length][];
this.defectsXY=new int [this.usedChannels.length][][];
......@@ -616,17 +617,25 @@ public class EyesisCorrections {
this.defectsDiff[nChn]=null;
}
int [][] bayer={{1,0},{2,1}}; // GR/BG
final Thread[] threads = ImageDtt.newThreadArray(threadsMax);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
ImagePlus imp=null,imp_composite=null;
for (int nFile=0;nFile<correctionsParameters.getSourcePaths().length;nFile++){
ImagejJp4Tiff imagejJp4Tiff = new ImagejJp4Tiff(); // override instance-global
//for (int nFile=0;nFile<correctionsParameters.getSourcePaths().length;nFile++){
for (int nFile = ai.getAndIncrement(); nFile < correctionsParameters.getSourcePaths().length; nFile = ai.getAndIncrement()) {
int [] channels={correctionsParameters.getChannelFromSourceTiff(correctionsParameters.getSourcePaths()[nFile])};
if (!this.pixelMapping.subcamerasUsed()) {
channels = this.pixelMapping.channelsForSubCamera(channels[0]-correctionsParameters.firstSubCameraConfig); // index in calibration files matching this source
if (!pixelMapping.subcamerasUsed()) {
channels = pixelMapping.channelsForSubCamera(channels[0]-correctionsParameters.firstSubCameraConfig); // index in calibration files matching this source
} else if (correctionsParameters.isJP4()){
int subCamera= channels[0]- correctionsParameters.firstSubCamera; // to match those in the sensor files
channels=this.pixelMapping.channelsForSubCamera(subCamera);
channels=pixelMapping.channelsForSubCamera(subCamera);
}
if (this.pixelMapping.isChannelAvailable(channels)) { //channels!=null) {
if (pixelMapping.isChannelAvailable(channels)) { //channels!=null) {
imp=null;
imp_composite=null;
......@@ -640,17 +649,17 @@ public class EyesisCorrections {
} // throws IOException, FormatException { // std - include non-elphel properties with prefix std
if ((imp==null) && (imp_composite==null)) {
if (this.debugLevel>0) System.out.println("createChannelVignetting(): can not open "+correctionsParameters.getSourcePaths()[nFile]+
if (debugLevel>0) System.out.println("createChannelVignetting(): can not open "+correctionsParameters.getSourcePaths()[nFile]+
" as "+(correctionsParameters.isJP4()?"JP4":"TIFF")+" file");
continue;
}
for (int chn=0;chn<channels.length;chn++) {
int srcChannel=channels[chn];
/// if ((this.channelWidthHeight[srcChannel]==null) && this.pixelMapping.isChannelAvailable(srcChannel)){
if (this.pixelMapping.isChannelAvailable(srcChannel)){
int subChannel=this.pixelMapping.getSubChannel(srcChannel);
/// if ((channelWidthHeight[srcChannel]==null) && pixelMapping.isChannelAvailable(srcChannel)){
if (pixelMapping.isChannelAvailable(srcChannel)){
int subChannel=pixelMapping.getSubChannel(srcChannel);
if (correct_vignetting) {
if (this.correctionsParameters.swapSubchannels01) {
if (correctionsParameters.swapSubchannels01) {
switch (subChannel){
case 0: subChannel=1; break;
case 1: subChannel=0; break;
......@@ -664,8 +673,8 @@ public class EyesisCorrections {
" channels.length="+channels.length);
for (int i=0;i<channels.length;i++) System.out.print(" "+channels[i]);
System.out.println();
for (int i=0;i<this.usedChannels.length;i++) if (this.usedChannels[i]) {
System.out.println(i+": subCamera="+this.pixelMapping.sensors[i].subcamera);
for (int i=0;i<usedChannels.length;i++) if (usedChannels[i]) {
System.out.println(i+": subCamera="+pixelMapping.sensors[i].subcamera);
}
}
......@@ -679,46 +688,46 @@ public class EyesisCorrections {
imp, // ImagePlus imp_src,
pixelMapping.sensors[srcChannel].getSensorWH(),
true); // boolean replicate);
this.channelVignettingCorrection[srcChannel]=this.pixelMapping.getBayerFlatFieldFloat(
channelVignettingCorrection[srcChannel]=pixelMapping.getBayerFlatFieldFloat(
srcChannel,
bayer,
1.5); // TODO: Make range configurable, improve FF interpolation in calibraion
} else { // no vignetting correction
int [] wh = this.pixelMapping.getSensorWH(srcChannel);
this.channelVignettingCorrection[srcChannel] = new float [wh[0]*wh[1]];
for (int i = 0; i < this.channelVignettingCorrection[srcChannel].length; i++) {
this.channelVignettingCorrection[srcChannel][i] = 1.0f;
int [] wh = pixelMapping.getSensorWH(srcChannel);
channelVignettingCorrection[srcChannel] = new float [wh[0]*wh[1]];
for (int i = 0; i < channelVignettingCorrection[srcChannel].length; i++) {
channelVignettingCorrection[srcChannel][i] = 1.0f;
}
}
if (this.debugLevel>0){
if (debugLevel>0){
SDFA_INSTANCE.showArrays(
this.channelVignettingCorrection[srcChannel],
this.pixelMapping.sensors[srcChannel].pixelCorrectionWidth,
this.pixelMapping.sensors[srcChannel].pixelCorrectionHeight,
channelVignettingCorrection[srcChannel],
pixelMapping.sensors[srcChannel].pixelCorrectionWidth,
pixelMapping.sensors[srcChannel].pixelCorrectionHeight,
"Vingetting-"+srcChannel
);
}
if (this.debugLevel>0){
if (debugLevel>0){
System.out.println("Created vignetting info for channel "+srcChannel+
" subchannel="+subChannel+" ("+
correctionsParameters.getSourcePaths()[nFile]+")");
/// System.out.println("imageWidth= "+this.channelWidthHeight[srcChannel][0]+" imageHeight="+this.channelWidthHeight[srcChannel][1]);
/// System.out.println("imageWidth= "+channelWidthHeight[srcChannel][0]+" imageHeight="+channelWidthHeight[srcChannel][1]);
}
this.defectsXY[srcChannel]=this.pixelMapping.getDefectsXY(srcChannel);
this.defectsDiff[srcChannel]=this.pixelMapping.getDefectsDiff(srcChannel);
if (this.debugLevel>0){
if (this.defectsXY[srcChannel]==null){
defectsXY[srcChannel]=pixelMapping.getDefectsXY(srcChannel);
defectsDiff[srcChannel]=pixelMapping.getDefectsDiff(srcChannel);
if (debugLevel>0){
if (defectsXY[srcChannel]==null){
System.out.println("No pixel defects info is available for channel "+srcChannel);
} else {
System.out.println("Extracted "+this.defectsXY[srcChannel].length+" pixel outliers for channel "+srcChannel+
System.out.println("Extracted "+defectsXY[srcChannel].length+" pixel outliers for channel "+srcChannel+
" (x:y:difference");
int numInLine=8;
for (int i=0;i<this.defectsXY[srcChannel].length;i++){
System.out.print(this.defectsXY[srcChannel][0]+":"+this.defectsXY[srcChannel][1]);
if ((this.defectsDiff[srcChannel]!=null) && (this.defectsDiff[srcChannel].length>i)){
System.out.print(":"+IJ.d2s(this.defectsDiff[srcChannel][i],3)+" ");
for (int i=0;i<defectsXY[srcChannel].length;i++){
System.out.print(defectsXY[srcChannel][0]+":"+defectsXY[srcChannel][1]);
if ((defectsDiff[srcChannel]!=null) && (defectsDiff[srcChannel].length>i)){
System.out.print(":"+IJ.d2s(defectsDiff[srcChannel][i],3)+" ");
}
if (((i%numInLine)==(numInLine-1)) || (i == (this.defectsXY[srcChannel].length-1))) System.out.println();
if (((i%numInLine)==(numInLine-1)) || (i == (defectsXY[srcChannel].length-1))) System.out.println();
}
}
}
......@@ -727,6 +736,10 @@ public class EyesisCorrections {
}
}
}
};
}
ImageDtt.startAndJoin(threads);
}
boolean [] usedChannels(String [] paths){
return usedChannels(paths, false);
......
......@@ -720,8 +720,10 @@ private Panel panel1,
addButton("Inter Accumulate", panelClt5, color_process);
addButton("Aux Inter Accumulate", panelClt5, color_process);
addButton("Inter Noise", panelClt5, color_process);
addButton("Inter Noise Aux", panelClt5, color_process);
addButton("Inter Debug Noise", panelClt5, color_report);
addButton("Noise Stats", panelClt5, color_process);
addButton("Noise Stats Aux", panelClt5, color_process);
addButton("Test 1D", panelClt5, color_process);
addButton("Colorize Depth", panelClt5, color_process);
plugInFrame.add(panelClt5);
......@@ -5203,15 +5205,23 @@ private Panel panel1,
DEBUG_LEVEL=MASTER_DEBUG_LEVEL;
EYESIS_CORRECTIONS.setDebug(DEBUG_LEVEL);
CLT_PARAMETERS.batch_run = true;
intersceneNoise(false); // boolean bayer_artifacts_debug);
intersceneNoise(false, false); // boolean bayer_artifacts_debug);
return;
/* ======================================================================== */
} else if (label.equals("Inter Noise Aux")) {
DEBUG_LEVEL=MASTER_DEBUG_LEVEL;
EYESIS_CORRECTIONS.setDebug(DEBUG_LEVEL);
CLT_PARAMETERS.batch_run = true;
intersceneNoise(true, false); // boolean bayer_artifacts_debug);
return;
/* ======================================================================== */
} else if (label.equals("Inter Debug Noise")) {
DEBUG_LEVEL=MASTER_DEBUG_LEVEL;
EYESIS_CORRECTIONS.setDebug(DEBUG_LEVEL);
CLT_PARAMETERS.batch_run = true;
intersceneNoise(true); // boolean bayer_artifacts_debug);
intersceneNoise(false, true); // boolean bayer_artifacts_debug);
return;
/* ======================================================================== */
......@@ -5219,8 +5229,17 @@ private Panel panel1,
DEBUG_LEVEL=MASTER_DEBUG_LEVEL;
EYESIS_CORRECTIONS.setDebug(DEBUG_LEVEL);
CLT_PARAMETERS.batch_run = true;
intersceneNoiseStats();
intersceneNoiseStats(false);
return;
/* ======================================================================== */
} else if (label.equals("Noise Stats Aux")) {
DEBUG_LEVEL=MASTER_DEBUG_LEVEL;
EYESIS_CORRECTIONS.setDebug(DEBUG_LEVEL);
CLT_PARAMETERS.batch_run = true;
intersceneNoiseStats(true);
return;
/* ======================================================================== */
} else if (label.equals("Colorize Depth")) {
DEBUG_LEVEL=MASTER_DEBUG_LEVEL;
......@@ -7095,7 +7114,9 @@ private Panel panel1,
return true;
}
public boolean intersceneNoise(boolean bayer_artifacts_debug) {
public boolean intersceneNoise(
boolean use_aux,
boolean bayer_artifacts_debug) {
long startTime=System.nanoTime();
// load needed sensor and kernels files
if (!prepareRigImages()) return false;
......@@ -7105,7 +7126,7 @@ private Panel panel1,
if (DEBUG_LEVEL > -2){
System.out.println("++++++++++++++ Testing Interscene processing ++++++++++++++");
}
/*
if (CLT_PARAMETERS.useGPU()) { // only init GPU instances if it is used
if (GPU_TILE_PROCESSOR == null) {
try {
......@@ -7132,15 +7153,62 @@ private Panel panel1,
QUAD_CLT.setGPU(GPU_QUAD);
}
}
*/
if (CLT_PARAMETERS.useGPU()) { // only init GPU instances if it is used
if (GPU_TILE_PROCESSOR == null) {
try {
GPU_TILE_PROCESSOR = new GPUTileProcessor(CORRECTION_PARAMETERS.tile_processor_gpu);
} catch (Exception e) {
System.out.println("Failed to initialize GPU class");
// TODO Auto-generated catch block
e.printStackTrace();
return false;
} //final int debugLevel);
}
if (use_aux) {
if (CLT_PARAMETERS.useGPU(true) && (QUAD_CLT_AUX != null) && (GPU_QUAD_AUX == null)) { // if GPU AUX is needed
try {
GPU_QUAD_AUX = new GpuQuad(//
GPU_TILE_PROCESSOR, QUAD_CLT_AUX,
4,
3);
} catch (Exception e) {
System.out.println("Failed to initialize GpuQuad class");
// TODO Auto-generated catch block
e.printStackTrace();
return false;
} //final int debugLevel);
QUAD_CLT_AUX.setGPU(GPU_QUAD_AUX);
}
} else {
if (CLT_PARAMETERS.useGPU(false) && (QUAD_CLT != null) && (GPU_QUAD == null)) { // if GPU main is needed
try {
GPU_QUAD = new GpuQuad(
GPU_TILE_PROCESSOR, QUAD_CLT,
4,
3);
} catch (Exception e) {
System.out.println("Failed to initialize GpuQuad class");
// TODO Auto-generated catch block
e.printStackTrace();
return false;
} //final int debugLevel);
QUAD_CLT.setGPU(GPU_QUAD);
}
}
}
QuadCLT quadCLT = use_aux ? QUAD_CLT_AUX : QUAD_CLT;
ColorProcParameters colorProcParameters = use_aux ? COLOR_PROC_PARAMETERS_AUX : COLOR_PROC_PARAMETERS;
try {
TWO_QUAD_CLT.intersceneNoise(
QUAD_CLT, // QuadCLT quadCLT_main,
quadCLT, // QuadCLT quadCLT_main,
CLT_PARAMETERS, // EyesisCorrectionParameters.DCTParameters dct_parameters,
DEBAYER_PARAMETERS, //EyesisCorrectionParameters.DebayerParameters debayerParameters,
COLOR_PROC_PARAMETERS, //EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
CHANNEL_GAINS_PARAMETERS, //CorrectionColorProc.ColorGainsParameters channelGainParameters,
RGB_PARAMETERS, //EyesisCorrectionParameters.RGBParameters rgbParameters,
DEBAYER_PARAMETERS, // EyesisCorrectionParameters.DebayerParameters debayerParameters,
colorProcParameters, // EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
CHANNEL_GAINS_PARAMETERS, // CorrectionColorProc.ColorGainsParameters channelGainParameters,
RGB_PARAMETERS, // EyesisCorrectionParameters.RGBParameters rgbParameters,
EQUIRECTANGULAR_PARAMETERS, // EyesisCorrectionParameters.EquirectangularParameters equirectangularParameters,
PROPERTIES, // Properties properties,
bayer_artifacts_debug, // boolean bayer_artifacts_debug
......@@ -7164,7 +7232,7 @@ private Panel panel1,
return true;
}
public boolean intersceneNoiseStats() {
public boolean intersceneNoiseStats(boolean use_aux) {
long startTime=System.nanoTime();
// load needed sensor and kernels files
if (!prepareRigImages()) return false;
......@@ -7174,7 +7242,7 @@ private Panel panel1,
if (DEBUG_LEVEL > -2){
System.out.println("++++++++++++++ Testing Interscene processing ++++++++++++++");
}
/*
if (CLT_PARAMETERS.useGPU()) { // only init GPU instances if it is used
if (GPU_TILE_PROCESSOR == null) {
try {
......@@ -7201,13 +7269,60 @@ private Panel panel1,
QUAD_CLT.setGPU(GPU_QUAD);
}
}
*/
if (CLT_PARAMETERS.useGPU()) { // only init GPU instances if it is used
if (GPU_TILE_PROCESSOR == null) {
try {
GPU_TILE_PROCESSOR = new GPUTileProcessor(CORRECTION_PARAMETERS.tile_processor_gpu);
} catch (Exception e) {
System.out.println("Failed to initialize GPU class");
// TODO Auto-generated catch block
e.printStackTrace();
return false;
} //final int debugLevel);
}
if (use_aux) {
if (CLT_PARAMETERS.useGPU(true) && (QUAD_CLT_AUX != null) && (GPU_QUAD_AUX == null)) { // if GPU AUX is needed
try {
GPU_QUAD_AUX = new GpuQuad(//
GPU_TILE_PROCESSOR, QUAD_CLT_AUX,
4,
3);
} catch (Exception e) {
System.out.println("Failed to initialize GpuQuad class");
// TODO Auto-generated catch block
e.printStackTrace();
return false;
} //final int debugLevel);
QUAD_CLT_AUX.setGPU(GPU_QUAD_AUX);
}
} else {
if (CLT_PARAMETERS.useGPU(false) && (QUAD_CLT != null) && (GPU_QUAD == null)) { // if GPU main is needed
try {
GPU_QUAD = new GpuQuad(
GPU_TILE_PROCESSOR, QUAD_CLT,
4,
3);
} catch (Exception e) {
System.out.println("Failed to initialize GpuQuad class");
// TODO Auto-generated catch block
e.printStackTrace();
return false;
} //final int debugLevel);
QUAD_CLT.setGPU(GPU_QUAD);
}
}
}
QuadCLT quadCLT = use_aux ? QUAD_CLT_AUX : QUAD_CLT;
ColorProcParameters colorProcParameters = use_aux ? COLOR_PROC_PARAMETERS_AUX : COLOR_PROC_PARAMETERS;
try {
TWO_QUAD_CLT.intersceneNoiseStats(
QUAD_CLT, // QuadCLT quadCLT_main,
quadCLT, // QUAD_CLT, // QuadCLT quadCLT_main,
CLT_PARAMETERS, // EyesisCorrectionParameters.DCTParameters dct_parameters,
DEBAYER_PARAMETERS, //EyesisCorrectionParameters.DebayerParameters debayerParameters,
COLOR_PROC_PARAMETERS, //EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
colorProcParameters, // COLOR_PROC_PARAMETERS, //EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
CHANNEL_GAINS_PARAMETERS, //CorrectionColorProc.ColorGainsParameters channelGainParameters,
RGB_PARAMETERS, //EyesisCorrectionParameters.RGBParameters rgbParameters,
EQUIRECTANGULAR_PARAMETERS, // EyesisCorrectionParameters.EquirectangularParameters equirectangularParameters,
......
......@@ -16693,8 +16693,6 @@ public class ImageDttCPU {
final int width,
final TpTask [] tp_tasks,
final TpTask [] tp_tasks_target, // null or wider array to provide target disparity for neighbors
// final double [][] disparity_array, // [tilesY][tilesX] - individual per-tile expected disparity
// final double disparity_corr, // apply to disparity array data only, tp_tasks are already corrected
final ImageDttParameters imgdtt_params, // Now just extra correlation parameters, later will include, most others
// dcorr_td should be either null, or double [tp_tasks.length][][];
final double [][][][] dcorr_td, // [tile][pair][4][64] sparse by pair transform domain representation of corr pairs
......@@ -16703,9 +16701,6 @@ public class ImageDttCPU {
final GeometryCorrection geometryCorrection,
final int kernel_step,
final int window_type,
// final double disparity_corr, should be aapplied to tp_tasks already!
final double corr_red,
final double corr_blue,
// related to tilt
......
......@@ -2017,7 +2017,7 @@ public class QuadCLT extends QuadCLTCPU {
true); //newAllowed, // save
String file_name = image_name + suffix;
String file_path = x3d_path + Prefs.getFileSeparator() + file_name + ".tiff";
if (getGPU().getQuadCLT() != this) {
if ((getGPU() != null) && (getGPU().getQuadCLT() != this)) {
getGPU().updateQuadCLT(this); // to re-load new set of Bayer images to the GPU
}
......@@ -2034,8 +2034,8 @@ public class QuadCLT extends QuadCLTCPU {
threadsMax, // final int threadsMax, // maximal number of threads to launch
false, // final boolean updateStatus,
debugLevel); // final int debugLevel);
FileSaver fs=new FileSaver(img_noise);
fs.saveAsTiff(file_path);
// FileSaver fs=new FileSaver(img_noise); // is null, will be saved inside to /home/elphel/lwir16-proc/proc1/results_cuda/1626032208_613623-AUX-SHIFTED-D0.0
// fs.saveAsTiff(file_path);
}
public ImagePlus processCLTQuadCorrGPU(
......@@ -2054,7 +2054,7 @@ public class QuadCLT extends QuadCLTCPU {
if (gpuQuad == null) {
System.out.println("GPU instance is not initialized, using CPU mode");
processCLTQuadCorrCPU(
imp_quad, // ImagePlus [] imp_quad, // should have properties "name"(base for saving results), "channel","path"
// imp_quad, // ImagePlus [] imp_quad, // should have properties "name"(base for saving results), "channel","path"
saturation_imp, // boolean [][] saturation_imp, // (near) saturated pixels or null // Not needed use this.saturation_imp
clt_parameters, // CLTParameters clt_parameters,
debayerParameters, // EyesisCorrectionParameters.DebayerParameters debayerParameters,
......
......@@ -9329,7 +9329,7 @@ if (debugLevel > -100) return true; // temporarily !
if (updateStatus) IJ.showStatus("CPU: Rendering 4 image set (disparity = 0) for "+quadCLT_main.image_name+ "and a thumb nail");
quadCLT_main.processCLTQuadCorrCPU( // returns ImagePlus, but it already should be saved/shown
imp_srcs_main, // [srcChannel], // should have properties "name"(base for saving results), "channel","path"
// imp_srcs_main, // [srcChannel], // should have properties "name"(base for saving results), "channel","path"
saturation_imp_main, // boolean [][] saturation_imp, // (near) saturated pixels or null
clt_parameters,
debayerParameters,
......@@ -9469,8 +9469,10 @@ if (debugLevel > -100) return true; // temporarily !
} else { // if (quadCLT_main.correctionsParameters.clt_batch_explore) {
int num_restored = 0;
try {
num_restored = quadCLT_main.restoreDSI(DSI_MAIN_SUFFIX, // "-DSI_COMBO", "-DSI_MAIN"
dsi);
num_restored = quadCLT_main.restoreDSI(
DSI_MAIN_SUFFIX, // "-DSI_COMBO", "-DSI_MAIN"
dsi,
false);
} catch (Exception e) {
......@@ -9622,7 +9624,7 @@ if (debugLevel > -100) return true; // temporarily !
if (updateStatus) IJ.showStatus("Rendering 4 AUX image set (disparity = 0) for "+quadCLT_aux.image_name);
quadCLT_aux.processCLTQuadCorrCPU( // returns ImagePlus, but it already should be saved/shown
imp_srcs_aux, // [srcChannel], // should have properties "name"(base for saving results), "channel","path"
// imp_srcs_aux, // [srcChannel], // should have properties "name"(base for saving results), "channel","path"
saturation_imp_aux, // boolean [][] saturation_imp, // (near) saturated pixels or null
clt_parameters,
debayerParameters,
......@@ -9674,7 +9676,9 @@ if (debugLevel > -100) return true; // temporarily !
dsi[DSI_DISPARITY_AUX_LMA] = aux_last_scan[2];
// quadCLT_main.saveDSIMain (dsi);
quadCLT_aux.saveDSIAll (dsi);
quadCLT_aux.saveDSIAll (
"-DSI_MAIN", // String suffix, // "-DSI_MAIN"
dsi);
if (clt_parameters.rig.ml_copyJP4) {
copyJP4src(
set_name, // String set_name
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment