Commit 5578f549 authored by Andrey Filippov's avatar Andrey Filippov

Tweaking CLT processing

parent 1b78d16a
......@@ -1873,16 +1873,16 @@ public class EyesisCorrectionParameters {
public int clt_window = 1; // currently only 3 types of windows - 0 (none), 1 and 2
public double shift_x = 0.0;
public double shift_y = 0.0;
public int iclt_mask = 15; // which transforms to combine
public int tileX = 258; // number of kernel tile (0..163)
public int tileY = 133; // number of kernel tile (0..122)
public int dbg_mode = 0; // 0 - normal, +1 - no DCT/IDCT
public int ishift_x = 0; // debug feature - shift source image by this pixels left
public int ishift_y = 0; // debug feature - shift source image by this pixels down
public double fat_zero = 0.0; // modify phase correlation to prevent division by very small numbers
public double corr_sigma = 0.8; // LPF correlarion sigma
public int iclt_mask = 15; // which transforms to combine
public int tileX = 258; // number of kernel tile (0..163)
public int tileY = 133; // number of kernel tile (0..122)
public int dbg_mode = 0; // 0 - normal, +1 - no DCT/IDCT
public int ishift_x = 0; // debug feature - shift source image by this pixels left
public int ishift_y = 0; // debug feature - shift source image by this pixels down
public double fat_zero = 0.0; // modify phase correlation to prevent division by very small numbers
public double corr_sigma = 0.8; // LPF correlarion sigma
public boolean norm_kern = true; // normalize kernels
public boolean gains_equalize = true; // equalize channel color gains among all cameras
public double novignetting_r = 0.2644; // reg gain in the center of sensor calibration R (instead of vignetting)
public double novignetting_g = 0.3733; // green gain in the center of sensor calibration G
public double novignetting_b = 0.2034; // blue gain in the center of sensor calibration B
......@@ -1908,9 +1908,11 @@ public class EyesisCorrectionParameters {
properties.setProperty(prefix+"fat_zero", this.fat_zero+"");
properties.setProperty(prefix+"corr_sigma", this.corr_sigma+"");
properties.setProperty(prefix+"norm_kern", this.norm_kern+"");
properties.setProperty(prefix+"gains_equalize", this.gains_equalize+"");
properties.setProperty(prefix+"novignetting_r", this.novignetting_r+"");
properties.setProperty(prefix+"novignetting_g", this.novignetting_g+"");
properties.setProperty(prefix+"novignetting_b", this.novignetting_b+"");
properties.setProperty(prefix+"scale_r", this.scale_r+"");
properties.setProperty(prefix+"scale_g", this.scale_g+"");
properties.setProperty(prefix+"scale_b", this.scale_b+"");
......@@ -1933,6 +1935,7 @@ public class EyesisCorrectionParameters {
if (properties.getProperty(prefix+"fat_zero")!=null) this.fat_zero=Double.parseDouble(properties.getProperty(prefix+"fat_zero"));
if (properties.getProperty(prefix+"corr_sigma")!=null) this.corr_sigma=Double.parseDouble(properties.getProperty(prefix+"corr_sigma"));
if (properties.getProperty(prefix+"norm_kern")!=null) this.norm_kern=Boolean.parseBoolean(properties.getProperty(prefix+"norm_kern"));
if (properties.getProperty(prefix+"gains_equalize")!=null) this.gains_equalize=Boolean.parseBoolean(properties.getProperty(prefix+"gains_equalize"));
if (properties.getProperty(prefix+"novignetting_r")!=null) this.novignetting_r=Double.parseDouble(properties.getProperty(prefix+"novignetting_r"));
if (properties.getProperty(prefix+"novignetting_g")!=null) this.novignetting_g=Double.parseDouble(properties.getProperty(prefix+"novignetting_g"));
if (properties.getProperty(prefix+"novignetting_b")!=null) this.novignetting_b=Double.parseDouble(properties.getProperty(prefix+"novignetting_b"));
......@@ -1959,6 +1962,7 @@ public class EyesisCorrectionParameters {
gd.addNumericField("Modify phase correlation to prevent division by very small numbers", this.fat_zero, 4);
gd.addNumericField("LPF correlarion sigma ", this.corr_sigma, 3);
gd.addCheckbox ("Normalize kernels ", this.norm_kern);
gd.addCheckbox ("Equalize gains between channels", this.gains_equalize);
gd.addNumericField("Reg gain in the center of sensor calibration R (instead of vignetting)", this.novignetting_r, 4);
gd.addNumericField("Green gain in the center of sensor calibration G (instead of vignetting)",this.novignetting_g, 4);
gd.addNumericField("Blue gain in the center of sensor calibration B (instead of vignetting)", this.novignetting_b, 4);
......@@ -1986,6 +1990,7 @@ public class EyesisCorrectionParameters {
this.fat_zero = gd.getNextNumber();
this.corr_sigma = gd.getNextNumber();
this.norm_kern= gd.getNextBoolean();
this.gains_equalize= gd.getNextBoolean();
this.novignetting_r= gd.getNextNumber();
this.novignetting_g= gd.getNextNumber();
this.novignetting_b= gd.getNextNumber();
......
......@@ -22,6 +22,7 @@
**
*/
import java.util.ArrayList;
import java.util.concurrent.atomic.AtomicInteger;
import ij.CompositeImage;
......@@ -2486,7 +2487,6 @@ public class EyesisDCT {
}
public void processCLTChannelImages(
// EyesisCorrectionParameters.DCTParameters dct_parameters,
EyesisCorrectionParameters.CLTParameters clt_parameters,
EyesisCorrectionParameters.DebayerParameters debayerParameters,
EyesisCorrectionParameters.NonlinParameters nonlinParameters,
......@@ -2616,8 +2616,6 @@ public class EyesisDCT {
}
System.out.println("Processing "+fileIndices.length+" files finished at "+
IJ.d2s(0.000000001*(System.nanoTime()-this.startTime),3)+" sec, --- Free memory="+Runtime.getRuntime().freeMemory()+" (of "+Runtime.getRuntime().totalMemory()+")");
}
public ImagePlus processCLTChannelImage(
......@@ -2712,6 +2710,10 @@ public class EyesisDCT {
}
}
}
if (clt_parameters.gains_equalize){
}
String title=name+"-"+String.format("%02d", channel);
ImagePlus result=imp_src;
if (debugLevel>1) System.out.println("processing: "+path);
......@@ -3134,7 +3136,747 @@ public class EyesisDCT {
return result;
}
// Processing sets of 4 images together
public void processCLTSets(
EyesisCorrectionParameters.CLTParameters clt_parameters,
EyesisCorrectionParameters.DebayerParameters debayerParameters,
EyesisCorrectionParameters.NonlinParameters nonlinParameters,
EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
CorrectionColorProc.ColorGainsParameters channelGainParameters,
EyesisCorrectionParameters.RGBParameters rgbParameters,
EyesisCorrectionParameters.EquirectangularParameters equirectangularParameters,
int convolveFFTSize, // 128 - fft size, kernel size should be size/2
final int threadsMax, // maximal number of threads to launch
final boolean updateStatus,
final int debugLevel)
{
this.startTime=System.nanoTime();
String [] sourceFiles=correctionsParameters.getSourcePaths();
boolean [] enabledFiles=new boolean[sourceFiles.length];
for (int i=0;i<enabledFiles.length;i++) enabledFiles[i]=false;
int numFilesToProcess=0;
int numImagesToProcess=0;
for (int nFile=0;nFile<enabledFiles.length;nFile++){
if ((sourceFiles[nFile]!=null) && (sourceFiles[nFile].length()>1)) {
int [] channels={correctionsParameters.getChannelFromSourceTiff(sourceFiles[nFile])};
if (correctionsParameters.isJP4()){
int subCamera= channels[0]- correctionsParameters.firstSubCamera; // to match those in the sensor files
// removeUnusedSensorData should be off!?
channels=this.eyesisCorrections.pixelMapping.channelsForSubCamera(subCamera);
}
if (channels!=null){
for (int i=0;i<channels.length;i++) if (eyesisCorrections.isChannelEnabled(channels[i])){
if (!enabledFiles[nFile]) numFilesToProcess++;
enabledFiles[nFile]=true;
numImagesToProcess++;
}
}
}
}
if (numFilesToProcess==0){
System.out.println("No files to process (of "+sourceFiles.length+")");
return;
} else {
if (debugLevel>0) System.out.println(numFilesToProcess+ " files to process (of "+sourceFiles.length+"), "+numImagesToProcess+" images to process");
}
double [] referenceExposures=eyesisCorrections.calcReferenceExposures(debugLevel); // multiply each image by this and divide by individual (if not NaN)
int [][] fileIndices=new int [numImagesToProcess][2]; // file index, channel number
int index=0;
for (int nFile=0;nFile<enabledFiles.length;nFile++){
if ((sourceFiles[nFile]!=null) && (sourceFiles[nFile].length()>1)) {
int [] channels={correctionsParameters.getChannelFromSourceTiff(sourceFiles[nFile])};
if (correctionsParameters.isJP4()){
int subCamera= channels[0]- correctionsParameters.firstSubCamera; // to match those in the sensor files
channels=eyesisCorrections.pixelMapping.channelsForSubCamera(subCamera);
}
if (channels!=null){
for (int i=0;i<channels.length;i++) if (eyesisCorrections.isChannelEnabled(channels[i])){
fileIndices[index ][0]=nFile;
fileIndices[index++][1]=channels[i];
}
}
}
}
ArrayList<String> setNames = new ArrayList<String>();
ArrayList<ArrayList<Integer>> setFiles = new ArrayList<ArrayList<Integer>>();
for (int iImage=0;iImage<fileIndices.length;iImage++){
int nFile=fileIndices[iImage][0];
String setName = correctionsParameters.getNameFromSourceTiff(sourceFiles[nFile]);
if (!setNames.contains(setName)) {
setNames.add(setName);
setFiles.add(new ArrayList<Integer>());
}
setFiles.get(setNames.indexOf(setName)).add(new Integer(nFile));
}
int iImage = 0;
for (int nSet = 0; nSet < setNames.size(); nSet++){
int maxChn = 0;
for (int i = 0; i < setFiles.get(nSet).size(); i++){
int chn = fileIndices[setFiles.get(nSet).get(i)][1];
if (chn > maxChn) maxChn = chn;
}
int [] channelFiles = new int[maxChn+1];
for (int i =0; i < channelFiles.length; i++) channelFiles[i] = -1;
for (int i = 0; i < setFiles.get(nSet).size(); i++){
channelFiles[fileIndices[setFiles.get(nSet).get(i)][1]] = setFiles.get(nSet).get(i);
}
ImagePlus [] imp_srcs = new ImagePlus[channelFiles.length];
double [] scaleExposure = new double[channelFiles.length];
for (int srcChannel=0; srcChannel<channelFiles.length; srcChannel++){
int nFile=channelFiles[srcChannel];
imp_srcs[srcChannel]=null;
if (nFile >=0){
if (correctionsParameters.isJP4()){
int subchannel=eyesisCorrections.pixelMapping.getSubChannel(srcChannel);
if (this.correctionsParameters.swapSubchannels01) {
switch (subchannel){
case 0: subchannel=1; break;
case 1: subchannel=0; break;
}
}
if (debugLevel>0) System.out.println("Processing set " + setNames.get(nSet)+" channel "+srcChannel+" - subchannel "+subchannel+" of "+sourceFiles[nFile]);
ImagePlus imp_composite=eyesisCorrections.JP4_INSTANCE.open(
"", // path,
sourceFiles[nFile],
"", //arg - not used in JP46 reader
true, // un-apply camera color gains
null, // new window
false); // do not show
imp_srcs[srcChannel]=eyesisCorrections.JP4_INSTANCE.demuxImage(imp_composite, subchannel);
if (imp_srcs[srcChannel] == null) imp_srcs[srcChannel] = imp_composite; // not a composite image
// do we need to add any properties?
} else {
imp_srcs[srcChannel]=new ImagePlus(sourceFiles[nFile]);
// (new JP46_Reader_camera(false)).decodeProperiesFromInfo(imp_src); // decode existent properties from info
eyesisCorrections.JP4_INSTANCE.decodeProperiesFromInfo(imp_srcs[srcChannel]); // decode existent properties from info
if (debugLevel>0) System.out.println("Processing "+sourceFiles[nFile]);
}
scaleExposure[srcChannel] = 1.0;
if (!Double.isNaN(referenceExposures[nFile]) && (imp_srcs[srcChannel].getProperty("EXPOSURE")!=null)){
scaleExposure[srcChannel] = referenceExposures[nFile]/Double.parseDouble((String) imp_srcs[srcChannel].getProperty("EXPOSURE"));
if (debugLevel>0) System.out.println("Will scale intensity (to compensate for exposure) by "+scaleExposure);
}
imp_srcs[srcChannel].setProperty("name", correctionsParameters.getNameFromSourceTiff(sourceFiles[nFile]));
imp_srcs[srcChannel].setProperty("channel", srcChannel); // it may already have channel
imp_srcs[srcChannel].setProperty("path", sourceFiles[nFile]); // it may already have channel
if (this.correctionsParameters.pixelDefects && (eyesisCorrections.defectsXY!=null)&& (eyesisCorrections.defectsXY[srcChannel]!=null)){
// apply pixel correction
int numApplied= eyesisCorrections.correctDefects(
imp_srcs[srcChannel],
srcChannel,
debugLevel);
if ((debugLevel>0) && (numApplied>0)) { // reduce verbosity after verified defect correction works
System.out.println("Corrected "+numApplied+" pixels in "+sourceFiles[nFile]);
}
}
if (this.correctionsParameters.vignetting){
if ((eyesisCorrections.channelVignettingCorrection==null) || (srcChannel<0) || (srcChannel>=eyesisCorrections.channelVignettingCorrection.length) || (eyesisCorrections.channelVignettingCorrection[srcChannel]==null)){
System.out.println("No vignetting data for channel "+srcChannel);
return;
}
float [] pixels=(float []) imp_srcs[srcChannel].getProcessor().getPixels();
if (pixels.length!=eyesisCorrections.channelVignettingCorrection[srcChannel].length){
System.out.println("Vignetting data for channel "+srcChannel+" has "+eyesisCorrections.channelVignettingCorrection[srcChannel].length+" pixels, image "+sourceFiles[nFile]+" has "+pixels.length);
return;
}
// TODO: Move to do it once:
double min_non_zero = 0.0;
for (int i=0;i<pixels.length;i++){
double d = eyesisCorrections.channelVignettingCorrection[srcChannel][i];
if ((d > 0.0) && ((min_non_zero == 0) || (min_non_zero > d))){
min_non_zero = d;
}
}
double max_vign_corr = clt_parameters.vignetting_range*min_non_zero;
System.out.println("Vignetting data: channel="+srcChannel+", min = "+min_non_zero);
for (int i=0;i<pixels.length;i++){
double d = eyesisCorrections.channelVignettingCorrection[srcChannel][i];
if (d > max_vign_corr) d = max_vign_corr;
pixels[i]*=d;
}
// Scale here, combine with vignetting later?
int width = imp_srcs[srcChannel].getWidth();
int height = imp_srcs[srcChannel].getHeight();
for (int y = 0; y < height-1; y+=2){
for (int x = 0; x < width-1; x+=2){
pixels[y*width+x ] *= clt_parameters.scale_g;
pixels[y*width+x+width+1] *= clt_parameters.scale_g;
pixels[y*width+x +1] *= clt_parameters.scale_r;
pixels[y*width+x+width ] *= clt_parameters.scale_b;
}
}
} else { // assuming GR/BG pattern
System.out.println("Applying fixed color gain correction parameters: Gr="+
clt_parameters.novignetting_r+", Gg="+clt_parameters.novignetting_g+", Gb="+clt_parameters.novignetting_b);
float [] pixels=(float []) imp_srcs[srcChannel].getProcessor().getPixels();
int width = imp_srcs[srcChannel].getWidth();
int height = imp_srcs[srcChannel].getHeight();
double kr = clt_parameters.scale_r/clt_parameters.novignetting_r;
double kg = clt_parameters.scale_g/clt_parameters.novignetting_g;
double kb = clt_parameters.scale_b/clt_parameters.novignetting_b;
for (int y = 0; y < height-1; y+=2){
for (int x = 0; x < width-1; x+=2){
pixels[y*width+x ] *= kg;
pixels[y*width+x+width+1] *= kg;
pixels[y*width+x +1] *= kr;
pixels[y*width+x+width ] *= kb;
}
}
}
}
}
// may need to equalize gains between channels
if (clt_parameters.gains_equalize){
double [][] avr_pix = new double [channelFiles.length][3];
double [] avr_RGB = {0.0,0.0,0.0};
int numChn = 0;
for (int srcChannel=0; srcChannel < channelFiles.length; srcChannel++){
int nFile=channelFiles[srcChannel];
if (nFile >=0){
for (int i = 0; i < avr_pix[srcChannel].length; i++) avr_pix[srcChannel][i] = 0;
float [] pixels=(float []) imp_srcs[srcChannel].getProcessor().getPixels();
int width = imp_srcs[srcChannel].getWidth();
int height = imp_srcs[srcChannel].getHeight();
for (int y = 0; y < height-1; y+=2){
for (int x = 0; x < width-1; x+=2){
avr_pix[srcChannel][0] += pixels[y*width+x +1];
avr_pix[srcChannel][2] += pixels[y*width+x+width ];
avr_pix[srcChannel][1] += pixels[y*width+x ];
avr_pix[srcChannel][1] += pixels[y*width+x+width+1];
}
}
avr_pix[srcChannel][0] /= 0.25*width*height;
avr_pix[srcChannel][1] /= 0.5 *width*height;
avr_pix[srcChannel][2] /= 0.25*width*height;
for (int j=0; j < avr_RGB.length; j++) avr_RGB[j] += avr_pix[srcChannel][j];
numChn++;
if (debugLevel>-1) {
System.out.println("processCLTSets(): set "+ setNames.get(nSet) + " channel "+srcChannel+
" R"+avr_pix[srcChannel][0]+" G"+avr_pix[srcChannel][1]+" B"+avr_pix[srcChannel][2]);
}
}
}
for (int j=0; j < avr_RGB.length; j++) avr_RGB[j] /= numChn;
if (debugLevel>-1) {
System.out.println("processCLTSets(): set "+ setNames.get(nSet) + "average color values: "+
" R="+avr_RGB[0]+" G=" + avr_RGB[1]+" B=" + avr_RGB[2]);
}
for (int srcChannel=0; srcChannel < channelFiles.length; srcChannel++){
int nFile=channelFiles[srcChannel];
if (nFile >=0){
double [] scales = new double [avr_RGB.length];
for (int j=0;j < scales.length; j++){
scales[j] = avr_RGB[j]/avr_pix[srcChannel][j];
}
float [] pixels=(float []) imp_srcs[srcChannel].getProcessor().getPixels();
int width = imp_srcs[srcChannel].getWidth();
int height = imp_srcs[srcChannel].getHeight();
for (int y = 0; y < height-1; y+=2){
for (int x = 0; x < width-1; x+=2){
pixels[y*width+x ] *= scales[1];
pixels[y*width+x+width+1] *= scales[1];
pixels[y*width+x +1] *= scales[0];
pixels[y*width+x+width ] *= scales[2];
}
}
}
}
}
for (int srcChannel=0; srcChannel<channelFiles.length; srcChannel++){
int nFile=channelFiles[srcChannel];
if (nFile >=0){
processCLTSetImage( // returns ImagePlus, but it already should be saved/shown
imp_srcs[srcChannel], // should have properties "name"(base for saving results), "channel","path"
clt_parameters,
debayerParameters,
nonlinParameters,
colorProcParameters,
channelGainParameters,
rgbParameters,
convolveFFTSize, // 128 - fft size, kernel size should be size/2
scaleExposure[srcChannel],
threadsMax, // maximal number of threads to launch
updateStatus,
debugLevel);
// warp result (add support for different color modes)
if (this.correctionsParameters.equirectangular){
if (equirectangularParameters.clearFullMap) eyesisCorrections.pixelMapping.deleteEquirectangularMapFull(srcChannel); // save memory? //removeUnusedSensorData - no, use equirectangular specific settings
if (equirectangularParameters.clearAllMaps) eyesisCorrections.pixelMapping.deleteEquirectangularMapAll(srcChannel); // save memory? //removeUnusedSensorData - no, use equirectangular specific settings
}
//pixelMapping
Runtime.getRuntime().gc();
if (debugLevel >-1) System.out.println("Processing image "+(iImage+1)+" (of "+fileIndices.length+") finished at "+
IJ.d2s(0.000000001*(System.nanoTime()-this.startTime),3)+" sec, --- Free memory="+Runtime.getRuntime().freeMemory()+" (of "+Runtime.getRuntime().totalMemory()+")");
if (eyesisCorrections.stopRequested.get()>0) {
System.out.println("User requested stop");
return;
}
iImage++;
}
}
}
System.out.println("Processing "+fileIndices.length+" files finished at "+
IJ.d2s(0.000000001*(System.nanoTime()-this.startTime),3)+" sec, --- Free memory="+Runtime.getRuntime().freeMemory()+" (of "+Runtime.getRuntime().totalMemory()+")");
}
public ImagePlus processCLTSetImage(
ImagePlus imp_src, // should have properties "name"(base for saving results), "channel","path"
EyesisCorrectionParameters.CLTParameters clt_parameters,
EyesisCorrectionParameters.DebayerParameters debayerParameters,
EyesisCorrectionParameters.NonlinParameters nonlinParameters,
EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
CorrectionColorProc.ColorGainsParameters channelGainParameters,
EyesisCorrectionParameters.RGBParameters rgbParameters,
int convolveFFTSize, // 128 - fft size, kernel size should be size/2
double scaleExposure,
final int threadsMax, // maximal number of threads to launch
final boolean updateStatus,
final int debugLevel){
boolean advanced=this.correctionsParameters.zcorrect || this.correctionsParameters.equirectangular;
// boolean crop= advanced? true: this.correctionsParameters.crop;
boolean rotate= advanced? false: this.correctionsParameters.rotate;
double JPEG_scale= advanced? 1.0: this.correctionsParameters.JPEG_scale;
boolean toRGB= advanced? true: this.correctionsParameters.toRGB;
showDoubleFloatArrays sdfa_instance = new showDoubleFloatArrays(); // just for debugging?
// may use this.StartTime to report intermediate steps execution times
String name=(String) imp_src.getProperty("name");
// int channel= Integer.parseInt((String) imp_src.getProperty("channel"));
int channel= (Integer) imp_src.getProperty("channel");
String path= (String) imp_src.getProperty("path");
String title=name+"-"+String.format("%02d", channel);
ImagePlus result=imp_src;
if (debugLevel>1) System.out.println("processing: "+path);
result.setTitle(title+"RAW");
if (!this.correctionsParameters.split){
eyesisCorrections.saveAndShow(result, this.correctionsParameters);
return result;
}
// Generate split parameters for DCT processing mode
/*
EyesisCorrectionParameters.SplitParameters splitParameters = new EyesisCorrectionParameters.SplitParameters(
1, // oversample; // currently source kernels are oversampled
clt_parameters.transform_size/2, // addLeft
clt_parameters.transform_size/2, // addTop
clt_parameters.transform_size/2, // addRight
clt_parameters.transform_size/2 // addBottom
);
*/
// Split into Bayer components, oversample, increase canvas
double [][] double_stack = eyesisCorrections.bayerToDoubleStack(
result, // source Bayer image, linearized, 32-bit (float))
null); // no margins, no oversample
// ImageStack stack= eyesisCorrections.bayerToStack(
// result, // source Bayer image, linearized, 32-bit (float))
// splitParameters);
String titleFull=title+"-SPLIT";
if (debugLevel > -1){
double [] chn_avg = {0.0,0.0,0.0};
int width = imp_src.getWidth();
int height = imp_src.getHeight();
for (int c = 0; c < 3; c++){
for (int i = 0; i<double_stack[c].length; i++){
chn_avg[c] += double_stack[c][i];
}
}
chn_avg[0] /= width*height/4;
chn_avg[1] /= width*height/4;
chn_avg[2] /= width*height/2;
System.out.println("Split channels averages: R="+chn_avg[0]+", G="+chn_avg[2]+", B="+chn_avg[1]);
}
String [] rbg_titles = {"Red", "Blue", "Green"};
ImageStack stack;
if (!this.correctionsParameters.debayer) {
// showDoubleFloatArrays sdfa_instance = new showDoubleFloatArrays(); // just for debugging?
// ImageStack
stack = sdfa_instance.makeStack(double_stack, imp_src.getWidth(), imp_src.getHeight(), rbg_titles);
result= new ImagePlus(titleFull, stack);
eyesisCorrections.saveAndShow(result, this.correctionsParameters);
return result;
}
// =================
if (debugLevel > 0) {
System.out.println("Showing image BEFORE_CLT_PROC");
sdfa_instance.showArrays(double_stack, imp_src.getWidth(), imp_src.getHeight(), true, "BEFORE_CLT_PROC", rbg_titles);
}
if (this.correctionsParameters.deconvolve) { // process with DCT, otherwise use simple debayer
ImageDtt image_dtt = new ImageDtt();
/*
double [][][][][] clt_data = image_dtt.cltStack(
stack,
channel,
clt_parameters,
clt_parameters.ishift_x, //final int shiftX, // shift image horizontally (positive - right)
clt_parameters.ishift_y, //final int shiftY, // shift image vertically (positive - down)
threadsMax,
debugLevel,
updateStatus);
*/
for (int i =0 ; i < double_stack[0].length; i++){
// double_stack[0][i]*=2.0; // Scale red twice to compensate less pixels than green
// double_stack[1][i]*=2.0; // Scale blue twice to compensate less pixels than green
double_stack[2][i]*=0.5; // Scale blue twice to compensate less pixels than green
}
double [][][][][] clt_data = image_dtt.clt_aberrations(
double_stack, // final double [][] imade_data,
imp_src.getWidth(), // final int width,
clt_kernels[channel], // final double [][][][][] clt_kernels, // [color][tileY][tileX][band][pixel] , size should match image (have 1 tile around)
clt_parameters.kernel_step,
clt_parameters.transform_size,
clt_parameters.clt_window,
clt_parameters.shift_x, // final int shiftX, // shift image horizontally (positive - right) - just for testing
clt_parameters.shift_y, // final int shiftY, // shift image vertically (positive - down)
clt_parameters.tileX, // final int debug_tileX,
clt_parameters.tileY, // final int debug_tileY,
(clt_parameters.dbg_mode & 64) != 0, // no fract shift
(clt_parameters.dbg_mode & 128) != 0, // no convolve
(clt_parameters.dbg_mode & 256) != 0, // transpose convolve
threadsMax,
debugLevel);
// updateStatus);
System.out.println("clt_data.length="+clt_data.length+" clt_data[0].length="+clt_data[0].length
+" clt_data[0][0].length="+clt_data[0][0].length+" clt_data[0][0][0].length="+
clt_data[0][0][0].length+" clt_data[0][0][0][0].length="+clt_data[0][0][0][0].length);
/*
if (dct_parameters.color_DCT){ // convert RBG -> YPrPb
dct_data = image_dtt.dct_color_convert(
dct_data,
colorProcParameters.kr,
colorProcParameters.kb,
dct_parameters.sigma_rb, // blur of channels 0,1 (r,b) in addition to 2 (g)
dct_parameters.sigma_y, // blur of Y from G
dct_parameters.sigma_color, // blur of Pr, Pb in addition to Y
threadsMax,
debugLevel);
} else { // just LPF RGB
*/
if (clt_parameters.corr_sigma > 0){ // no filter at all
for (int chn = 0; chn < clt_data.length; chn++) {
image_dtt.clt_lpf(
clt_parameters.corr_sigma,
clt_data[chn],
threadsMax,
debugLevel);
}
}
/*
}
*/
int tilesY = imp_src.getHeight()/clt_parameters.transform_size;
int tilesX = imp_src.getWidth()/clt_parameters.transform_size;
if (debugLevel > 0){
System.out.println("--tilesX="+tilesX);
System.out.println("--tilesY="+tilesY);
}
if (debugLevel > 1){
double [][] clt = new double [clt_data.length*4][];
for (int chn = 0; chn < clt_data.length; chn++) {
double [][] clt_set = image_dtt.clt_dbg(
clt_data [chn],
threadsMax,
debugLevel);
for (int ii = 0; ii < clt_set.length; ii++) clt[chn*4+ii] = clt_set[ii];
}
if (debugLevel > 0){
sdfa_instance.showArrays(clt,
tilesX*clt_parameters.transform_size,
tilesY*clt_parameters.transform_size,
true,
result.getTitle()+"-CLT");
}
}
double [][] iclt_data = new double [clt_data.length][];
for (int chn=0; chn<clt_data.length;chn++){
iclt_data[chn] = image_dtt.iclt_2d(
clt_data[chn], // scanline representation of dcd data, organized as dct_size x dct_size tiles
clt_parameters.transform_size, // final int
clt_parameters.clt_window, // window_type
15, // clt_parameters.iclt_mask, //which of 4 to transform back
0, // clt_parameters.dbg_mode, //which of 4 to transform back
threadsMax,
debugLevel);
}
/*
if (dct_parameters.color_DCT){ // convert RBG -> YPrPb
if (debugLevel > 0) sdfa_instance.showArrays(
idct_data,
(tilesX + 1) * dct_parameters.dct_size,
(tilesY + 1) * dct_parameters.dct_size,
true,
result.getTitle()+"-IDCT-YPrPb");
if (dct_parameters.nonlin && ((dct_parameters.nonlin_y != 0.0) || (dct_parameters.nonlin_c != 0.0))) {
System.out.println("Applying edge emphasis, nonlin_y="+dct_parameters.nonlin_y+
", nonlin_c="+dct_parameters.nonlin_c+", nonlin_corn="+dct_parameters.nonlin_corn);
idct_data = edge_emphasis(
idct_data, // final double [][] yPrPb,
(tilesX + 1) * dct_parameters.dct_size, // final int width,
dct_parameters.dct_size, // final int step, //(does not need to be this) // just for multi-threading efficiency?
dct_parameters.nonlin_max_y, // final double nonlin_max_y = 1.0; // maximal amount of nonlinear line/edge emphasis for Y component
dct_parameters.nonlin_max_c, // final double nonlin_max_c = 1.0; // maximal amount of nonlinear line/edge emphasis for C component
dct_parameters.nonlin_y, // final double nonlin_y, // = 0.01; // amount of nonlinear line/edge emphasis for Y component
dct_parameters.nonlin_c, // final double nonlin_c, // = 0.01; // amount of nonlinear line/edge emphasis for C component
dct_parameters.nonlin_corn, // final double nonlin_corn, // = 0.5; // relative weight for nonlinear corner elements
(dct_parameters.denoise? dct_parameters.denoise_y:0.0), // final double denoise_y, // = 1.0; // maximal total smoothing of the Y post-kernel (will compete with edge emphasis)
(dct_parameters.denoise? dct_parameters.denoise_c:0.0), // final double denoise_c, // = 1.0; // maximal total smoothing of the color differences post-kernel (will compete with edge emphasis)
dct_parameters.denoise_y_corn, // final double denoise_y_corn, // = 0.5; // weight of the 4 corner pixels during denoise y (relative to 4 straight)
dct_parameters.denoise_c_corn, // final double denoise_c_corn, // = 0.5; // weight of the 4 corner pixels during denoise y (relative to 4 straight)
dct_parameters.dct_size, //, // final int threadsMax, // maximal number of threads to launch
debugLevel); // final int globalDebugLevel)
if (debugLevel > 0) sdfa_instance.showArrays(
idct_data,
(tilesX + 1) * dct_parameters.dct_size,
(tilesY + 1) * dct_parameters.dct_size,
true,
result.getTitle()+"-EMPH-"+dct_parameters.nonlin_y+"_"+dct_parameters.nonlin_c+"_"+dct_parameters.nonlin_corn);
}
// temporary convert back to RGB
idct_data = YPrPbToRBG(idct_data,
colorProcParameters.kr, // 0.299;
colorProcParameters.kb, // 0.114;
(tilesX + 1) * dct_parameters.dct_size);
} else {
if (dct_parameters.post_debayer){ // post_debayer
if (debugLevel > -1) System.out.println("Applying post-debayer");
if (debugLevel > -1) sdfa_instance.showArrays(
idct_data,
(tilesX + 1) * dct_parameters.dct_size,
(tilesY + 1) * dct_parameters.dct_size,
true,
result.getTitle()+"-rbg_before");
idct_data = post_debayer( // debayer in pixel domain after aberration correction
idct_data, // final double [][] rbg, // yPrPb,
(tilesX + 1) * dct_parameters.dct_size, // final int width,
dct_parameters.dct_size, // final int step, // just for multi-threading efficiency?
dct_parameters.dct_size, // final int threadsMax, // maximal number of threads to launch
debugLevel); // final int globalDebugLevel)
// add here YPrPb conversion, then edge_emphasis
if (debugLevel > -1) sdfa_instance.showArrays(
idct_data,
(tilesX + 1) * dct_parameters.dct_size,
(tilesY + 1) * dct_parameters.dct_size,
true,
result.getTitle()+"-rbg_after");
} else {
*/
// if (debugLevel > -1) System.out.println("Applyed LPF, sigma = "+dct_parameters.dbg_sigma);
if (debugLevel > 0) sdfa_instance.showArrays(
iclt_data,
(tilesX + 1) * clt_parameters.transform_size,
(tilesY + 1) * clt_parameters.transform_size,
true,
result.getTitle()+"-rbg_sigma");
/*
}
}
*/
if (debugLevel > 0) sdfa_instance.showArrays(iclt_data,
(tilesX + 0) * clt_parameters.transform_size,
(tilesY + 0) * clt_parameters.transform_size,
true,
result.getTitle()+"-ICLT-RGB");
// convert to ImageStack of 3 slices
String [] sliceNames = {"red", "blue", "green"};
stack = sdfa_instance.makeStack(
iclt_data,
(tilesX + 0) * clt_parameters.transform_size,
(tilesY + 0) * clt_parameters.transform_size,
sliceNames); // or use null to get chn-nn slice names
} else { // if (this.correctionsParameters.deconvolve) - here use a simple debayer
System.out.println("Bypassing CLT-based aberration correction");
stack = sdfa_instance.makeStack(double_stack, imp_src.getWidth(), imp_src.getHeight(), rbg_titles);
debayer_rbg(stack, 0.25); // simple standard 3x3 kernel debayer
}
if (debugLevel > -1){
double [] chn_avg = {0.0,0.0,0.0};
float [] pixels;
int width = stack.getWidth();
int height = stack.getHeight();
for (int c = 0; c <3; c++){
pixels = (float[]) stack.getPixels(c+1);
for (int i = 0; i<pixels.length; i++){
chn_avg[c] += pixels[i];
}
}
chn_avg[0] /= width*height;
chn_avg[1] /= width*height;
chn_avg[2] /= width*height;
System.out.println("Processed channels averages: R="+chn_avg[0]+", G="+chn_avg[2]+", B="+chn_avg[1]);
}
if (!this.correctionsParameters.colorProc){
result= new ImagePlus(titleFull, stack);
eyesisCorrections.saveAndShow(
result,
this.correctionsParameters);
return result;
}
if (debugLevel > 1) System.out.println("before colors.1");
//Processing colors - changing stack sequence to r-g-b (was r-b-g)
if (!eyesisCorrections.fixSliceSequence(
stack,
debugLevel)){
if (debugLevel > -1) System.out.println("fixSliceSequence() returned false");
return null;
}
if (debugLevel > 1) System.out.println("before colors.2");
if (debugLevel > 1){
ImagePlus imp_dbg=new ImagePlus(imp_src.getTitle()+"-"+channel+"-preColors",stack);
eyesisCorrections.saveAndShow(
imp_dbg,
this.correctionsParameters);
}
if (debugLevel > 1) System.out.println("before colors.3, scaleExposure="+scaleExposure+" scale = "+(255.0/eyesisCorrections.psfSubpixelShouldBe4/eyesisCorrections.psfSubpixelShouldBe4/scaleExposure));
CorrectionColorProc correctionColorProc=new CorrectionColorProc(eyesisCorrections.stopRequested);
double [][] yPrPb=new double [3][];
// if (dct_parameters.color_DCT){
// need to get YPbPr - not RGB here
// } else {
correctionColorProc.processColorsWeights(stack, // just gamma convert? TODO: Cleanup? Convert directly form the linear YPrPb
// 255.0/this.psfSubpixelShouldBe4/this.psfSubpixelShouldBe4, // double scale, // initial maximal pixel value (16))
// 255.0/eyesisCorrections.psfSubpixelShouldBe4/eyesisCorrections.psfSubpixelShouldBe4/scaleExposure, // double scale, // initial maximal pixel value (16))
// 255.0/2/2/scaleExposure, // double scale, // initial maximal pixel value (16))
255.0/scaleExposure, // double scale, // initial maximal pixel value (16))
colorProcParameters,
channelGainParameters,
channel,
null, //correctionDenoise.getDenoiseMask(),
this.correctionsParameters.blueProc,
debugLevel);
if (debugLevel > 1) System.out.println("Processed colors to YPbPr, total number of slices="+stack.getSize());
if (debugLevel > 1) {
ImagePlus imp_dbg=new ImagePlus("procColors",stack);
eyesisCorrections.saveAndShow(
imp_dbg,
this.correctionsParameters);
}
float [] fpixels;
int [] slices_YPrPb = {8,6,7};
yPrPb=new double [3][];
for (int n = 0; n < slices_YPrPb.length; n++){
fpixels = (float[]) stack.getPixels(slices_YPrPb[n]);
yPrPb[n] = new double [fpixels.length];
for (int i = 0; i < fpixels.length; i++) yPrPb[n][i] = fpixels[i];
}
if (toRGB) {
System.out.println("correctionColorProc.YPrPbToRGB");
stack = YPrPbToRGB(yPrPb,
colorProcParameters.kr, // 0.299;
colorProcParameters.kb, // 0.114;
stack.getWidth());
title=titleFull; // including "-DECONV" or "-COMBO"
titleFull=title+"-RGB-float";
//Trim stack to just first 3 slices
if (debugLevel > 1){ // 2){
ImagePlus imp_dbg=new ImagePlus("YPrPbToRGB",stack);
eyesisCorrections.saveAndShow(
imp_dbg,
this.correctionsParameters);
}
while (stack.getSize() > 3) stack.deleteLastSlice();
if (debugLevel > 1) System.out.println("Trimming color stack");
} else {
title=titleFull; // including "-DECONV" or "-COMBO"
titleFull=title+"-YPrPb"; // including "-DECONV" or "-COMBO"
if (debugLevel > 1) System.out.println("Using full stack, including YPbPr");
}
result= new ImagePlus(titleFull, stack);
// Crop image to match original one (scaled to oversampling)
/*
if (crop){ // always crop if equirectangular
if (debugLevel > 1) System.out.println("cropping");
stack = eyesisCorrections.cropStack32(stack,splitParameters);
if (debugLevel > 2) { // 2){
ImagePlus imp_dbg=new ImagePlus("cropped",stack);
eyesisCorrections.saveAndShow(
imp_dbg,
this.correctionsParameters);
}
}
*/
// rotate the result
if (rotate){ // never rotate for equirectangular
stack=eyesisCorrections.rotateStack32CW(stack);
}
if (!toRGB && !this.correctionsParameters.jpeg){ // toRGB set for equirectangular
if (debugLevel > 1) System.out.println("!toRGB && !this.correctionsParameters.jpeg");
eyesisCorrections.saveAndShow(result, this.correctionsParameters);
return result;
} else { // that's not the end result, save if required
if (debugLevel > 1) System.out.println("!toRGB && !this.correctionsParameters.jpeg - else");
eyesisCorrections.saveAndShow(result,
eyesisCorrections.correctionsParameters,
eyesisCorrections.correctionsParameters.save32,
false,
eyesisCorrections.correctionsParameters.JPEG_quality); // save, no show
}
// convert to RGB48 (16 bits per color component)
ImagePlus imp_RGB;
stack=eyesisCorrections.convertRGB32toRGB16Stack(
stack,
rgbParameters);
titleFull=title+"-RGB48";
result= new ImagePlus(titleFull, stack);
// ImagePlus imp_RGB24;
result.updateAndDraw();
if (debugLevel > 1) System.out.println("result.updateAndDraw(), "+titleFull+"-RGB48");
CompositeImage compositeImage=eyesisCorrections.convertToComposite(result);
if (!this.correctionsParameters.jpeg && !advanced){ // RGB48 was the end result
if (debugLevel > 1) System.out.println("if (!this.correctionsParameters.jpeg && !advanced)");
eyesisCorrections.saveAndShow(compositeImage, this.correctionsParameters);
return result;
} else { // that's not the end result, save if required
if (debugLevel > 1) System.out.println("if (!this.correctionsParameters.jpeg && !advanced) - else");
eyesisCorrections.saveAndShow(compositeImage, this.correctionsParameters, this.correctionsParameters.save16, false); // save, no show
// eyesisCorrections.saveAndShow(compositeImage, this.correctionsParameters, this.correctionsParameters.save16, true); // save, no show
}
imp_RGB=eyesisCorrections.convertRGB48toRGB24(
stack,
title+"-RGB24",
0, 65536, // r range 0->0, 65536->256
0, 65536, // g range
0, 65536);// b range
if (JPEG_scale!=1.0){
ImageProcessor ip=imp_RGB.getProcessor();
ip.setInterpolationMethod(ImageProcessor.BICUBIC);
ip=ip.resize((int)(ip.getWidth()*JPEG_scale),(int) (ip.getHeight()*JPEG_scale));
imp_RGB= new ImagePlus(imp_RGB.getTitle(),ip);
imp_RGB.updateAndDraw();
}
eyesisCorrections.saveAndShow(imp_RGB, this.correctionsParameters);
return result;
}
......
......@@ -494,6 +494,7 @@ private Panel panel1,
addButton("Read CLT kernels", panelClt1, color_process);
addButton("Reset CLT kernels", panelClt1, color_stop);
addButton("CLT process files", panelClt1, color_process);
addButton("CLT process sets", panelClt1, color_process);
add(panelClt1);
}
pack();
......@@ -4218,8 +4219,96 @@ private Panel panel1,
true,
PROPERTIES);
}
return;
} else if (label.equals("CLT process sets")) {
DEBUG_LEVEL=MASTER_DEBUG_LEVEL;
EYESIS_CORRECTIONS.setDebug(DEBUG_LEVEL);
if (EYESIS_DCT == null){
EYESIS_DCT = new EyesisDCT (
EYESIS_CORRECTIONS,
CORRECTION_PARAMETERS,
DCT_PARAMETERS);
if (DEBUG_LEVEL > 0){
System.out.println("Created new EyesisDCT instance, will need to read CLT kernels");
}
}
String configPath=null;
if (EYESIS_CORRECTIONS.correctionsParameters.saveSettings) {
configPath=EYESIS_CORRECTIONS.correctionsParameters.selectResultsDirectory(
true,
true);
if (configPath==null){
String msg="No results directory selected, command aborted";
System.out.println("Warning: "+msg);
IJ.showMessage("Warning",msg);
return;
}
configPath+=Prefs.getFileSeparator()+"autoconfig";
try {
saveTimestampedProperties(
configPath, // full path or null
null, // use as default directory if path==null
true,
PROPERTIES);
} catch (Exception e){
String msg="Failed to save configuration to "+configPath+", command aborted";
System.out.println("Error: "+msg);
IJ.showMessage("Error",msg);
return;
}
}
EYESIS_CORRECTIONS.initSensorFiles(DEBUG_LEVEL);
int numChannels=EYESIS_CORRECTIONS.getNumChannels();
NONLIN_PARAMETERS.modifyNumChannels(numChannels);
CHANNEL_GAINS_PARAMETERS.modifyNumChannels(numChannels);
if (!EYESIS_DCT.CLTKernelsAvailable()){
if (DEBUG_LEVEL > 0){
System.out.println("Reading CLT kernels");
}
EYESIS_DCT.readCLTKernels(
CLT_PARAMETERS,
THREADS_MAX,
UPDATE_STATUS, // update status info
DEBUG_LEVEL);
if (DEBUG_LEVEL > 1){
EYESIS_DCT.showCLTKernels(
THREADS_MAX,
UPDATE_STATUS, // update status info
DEBUG_LEVEL);
}
}
///========================================
EYESIS_DCT.processCLTSets(
CLT_PARAMETERS, // EyesisCorrectionParameters.DCTParameters dct_parameters,
DEBAYER_PARAMETERS, //EyesisCorrectionParameters.DebayerParameters debayerParameters,
NONLIN_PARAMETERS, //EyesisCorrectionParameters.NonlinParameters nonlinParameters,
COLOR_PROC_PARAMETERS, //EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
CHANNEL_GAINS_PARAMETERS, //CorrectionColorProc.ColorGainsParameters channelGainParameters,
RGB_PARAMETERS, //EyesisCorrectionParameters.RGBParameters rgbParameters,
EQUIRECTANGULAR_PARAMETERS, // EyesisCorrectionParameters.EquirectangularParameters equirectangularParameters,
CONVOLVE_FFT_SIZE, //int convolveFFTSize, // 128 - fft size, kernel size should be size/2
THREADS_MAX, //final int threadsMax, // maximal number of threads to launch
UPDATE_STATUS, //final boolean updateStatus,
DEBUG_LEVEL); //final int debugLevel);
if (configPath!=null) {
saveTimestampedProperties( // save config again
configPath, // full path or null
null, // use as default directory if path==null
true,
PROPERTIES);
}
return;
// End of buttons code
}
......
import Jama.Matrix;
import ij.IJ;
/**
......@@ -26,12 +27,15 @@ import ij.IJ;
*/
public class GeometryCorrection {
// public double azimuth; // azimuth of the lens entrance pupil center, degrees, clockwise looking from top
// public double radius; // mm, distance from the rotation axis
// public double height; // mm, up - from the origin point
// public double phi; // degrees, optical axis from azimuth/r vector, clockwise heading
// public double theta; // degrees, optical axis from the eyesis horizon, positive - up elevation
// public double psi; // degrees, rotation (of the sensor) around the optical axis. Positive if camera is rotated clockwise looking to the target roll
public int debugLevel = 0;
// public double azimuth; // azimuth of the lens entrance pupil center, degrees, clockwise looking from top
// public double radius; // mm, distance from the rotation axis
// public double height; // mm, up - from the origin point
// public double phi; // degrees, optical axis from azimuth/r vector, clockwise heading
// public double theta; // degrees, optical axis from the eyesis horizon, positive - up elevation
// public double psi; // degrees, rotation (of the sensor) around the optical axis. Positive if camera is rotated clockwise looking to the target roll
public int pixelCorrectionWidth=2592; // virtual camera center is at (pixelCorrectionWidth/2, pixelCorrectionHeight/2)
public int pixelCorrectionHeight=1936;
public double focalLength=4.5;
public double pixelSize= 2.2; //um
public double distortionRadius= 2.8512; // mm - half width of the sensor
......@@ -42,117 +46,338 @@ public class GeometryCorrection {
public double distortionA=0.0; // r^4 (normalized to focal length or to sensor half width?)
public double distortionB=0.0; // r^3
public double distortionC=0.0; // r^2
public double px0=1296.0; // center of the lens on the sensor, pixels
public double py0=968.0; // center of the lens on the sensor, pixels
// public double px0=1296.0; // center of the lens on the sensor, pixels
// public double py0=968.0; // center of the lens on the sensor, pixels
// parameters, common for all sensors
public double elevation = 0.0; // degrees, up - positive;
public double heading = 0.0; // degrees, CW (from top) - positive
// public double roll_common = 0.0; // degrees, CW (to target) - positive
public int numSensors = 4;
public double [] forward = null;
public double [] right = null;
public double [] height = null;
public double [] roll = null; // degrees, CW (to target) - positive
public double [][] pXY0 = null; // sensor center XY in pixels
public double common_right; // mm right, camera center
public double common_forward; // mm forward (to target), camera center
public double common_height; // mm up, camera center
public double common_roll; // degrees CW (to target) camera as a whole
public double [][] XYZ_he; // all cameras coordinates transformed to eliminate heading and elevation (rolls preserved)
public double [][] XYZ_her = null; // XYZ of the lenses in a corrected CCS (adjusted for to elevation, heading, common_roll)
public double [][] rXY = null; // XY pairs of the in a normal plane, relative to disparityRadius
public double cameraRadius=0; // average distance from the "mass center" of the sencors to the sensors
public double disparityRadius=0; // distance between cameras to normalize disparity units to. sqrt(2)*disparityRadius for quad camera
private double [] rByRDist=null;
private double stepR=0.001;
private double maxR=2.0; // calculate up to this*distortionRadius
public void setDistortion() {
// imp.setProperty("distortion_formula", "(normalized by distortionRadius in mm) Rdist/R=A8*R^7+A7*R^6+A6*R^5+A5*R^4+A*R^3+B*R^2+C*R+(1-A6-A7-A6-A5-A-B-C)");
// imp.setProperty("distortionRadius", ""+subCam.distortionRadius);
}
private double stepR=0.001;
private double maxR=2.0; // calculate up to this*distortionRadius
/*
if (this.rByRDist==null){
calcReverseDistortionTable();
}
double rND2R=getRByRDist(rD/this.distortionRadius,debugThis);
x*= rND2R; // positive - right
y*=-rND2R; // positive - up
public void setDistortion(
double focalLength,
double distortionC,
double distortionB,
double distortionA,
double distortionA5,
double distortionA6,
double distortionA7,
double distortionA8,
double distortionRadius,
int pixelCorrectionWidth, // virtual camera center is at (pixelCorrectionWidth/2, pixelCorrectionHeight/2)
int pixelCorrectionHeight,
double pixelSize
*/
) {
if (!Double.isNaN(focalLength)) this.focalLength = focalLength;
if (!Double.isNaN(distortionC)) this.distortionC = distortionC;
if (!Double.isNaN(distortionB)) this.distortionB = distortionB;
if (!Double.isNaN(distortionA)) this.distortionA = distortionA;
if (!Double.isNaN(distortionA5)) this.distortionA5 = distortionA5;
if (!Double.isNaN(distortionA6)) this.distortionA6 = distortionA6;
if (!Double.isNaN(distortionA7)) this.distortionA7 = distortionA7;
if (!Double.isNaN(distortionA8)) this.distortionA8 = distortionA8;
if (!Double.isNaN(distortionRadius)) this.distortionRadius = distortionRadius;
if (pixelCorrectionWidth >= 0) this.pixelCorrectionWidth = pixelCorrectionWidth;
if (pixelCorrectionHeight >= 0) this.pixelCorrectionHeight = pixelCorrectionHeight;
if (!Double.isNaN(pixelSize)) this.pixelSize = pixelSize;
// imp.setProperty("distortion_formula", "(normalized by distortionRadius in mm) Rdist/R=A8*R^7+A7*R^6+A6*R^5+A5*R^4+A*R^3+B*R^2+C*R+(1-A6-A7-A6-A5-A-B-C)");
// imp.setProperty("distortionRadius", ""+subCam.distortionRadius);
}
public void setSensors(
int numSensors, // <=0 - keep current
double elevation, // NaN - keep
double heading, // NaN - keep
double [] forward, // null - keep all, NaN - keep individual
double [] right, // null - keep all, NaN - keep individual
double [] roll, // null - keep all, NaN - keep individual
double [][] pXY0){ // null - keep all, [] null - keep individual
// Copied from PixelMapping
/**
* Calculate reverse distortion table - from pixel radius to non-distorted radius
* Rdist/R=A5*R^4+A*R^3+B*R^2+C*R+(1-A5-A-B-C)
* @return false if distortion is too high
*/
public boolean calcReverseDistortionTable(){
boolean debugThis=false; //true;
double delta=1E-8;
double minDerivative=0.1;
int numIterations=1000;
double drDistDr=1.0;
// public double distortionA5=0.0; //r^5 (normalized to focal length or to sensor half width?)
// public double distortionA=0.0; // r^4 (normalized to focal length or to sensor half width?)
// public double distortionB=0.0; // r^3
// public double distortionC=0.0; // r^2
boolean use8=(this.distortionA8!=0.0) || (this.distortionA7!=0.0) || (this.distortionA6!=0.0);
double d=1.0-this.distortionA8-this.distortionA7-this.distortionA6-this.distortionA5-this.distortionA-this.distortionB-this.distortionC;
double rPrev=0.0;
this.rByRDist=new double [(int) Math.ceil(this.maxR/this.stepR)+1];
for (int j=1;j<this.rByRDist.length;j++) this.rByRDist[j]=Double.NaN;
this.rByRDist[0]=1.0/d;
boolean bailOut=false;
if (debugThis) System.out.println("calcReverseDistortionTable()");
for (int i=1;i<this.rByRDist.length;i++) {
double rDist=this.stepR*i;
double r=rPrev+this.stepR/drDistDr;
// if (debugThis) System.out.println("calcReverseDistortionTable() i="+i+" rDist="+rDist+" r="+r+" rPrev="+rPrev);
for (int iteration=0;iteration<numIterations;iteration++){
double k;
if (use8){
k=(((((((this.distortionA8)*r+this.distortionA7)*r+this.distortionA6)*r+this.distortionA5)*r + this.distortionA)*r+this.distortionB)*r+this.distortionC)*r+d;
drDistDr=(((((((8*this.distortionA8)*r + 7*this.distortionA7)*r + 6*this.distortionA6)*r + 5*this.distortionA5)*r + 4*this.distortionA)*r+3*this.distortionB)*r+2*this.distortionC)*r+d;
} else {
k=(((this.distortionA5*r + this.distortionA)*r+this.distortionB)*r+this.distortionC)*r+d;
drDistDr=(((5*this.distortionA5*r + 4*this.distortionA)*r+3*this.distortionB)*r+2*this.distortionC)*r+d;
if (numSensors > 0) this.numSensors = numSensors;
if (!Double.isNaN(elevation)) this.elevation = elevation;
if (!Double.isNaN(heading)) this.heading = heading;
if (forward != null){
if (forward.length != numSensors){
throw new IllegalArgumentException ("forward.length ("+forward.length+") != numSensors ("+numSensors+")");
}
if ((this.forward == null) || (this.forward.length != numSensors)) this.forward = new double [numSensors];
for (int i = 0; i < numSensors; i++) if (!Double.isNaN(forward[i])) this.forward[i] = forward[i];
}
if (right != null){
if (right.length != numSensors){
throw new IllegalArgumentException ("right.length ("+right.length+") != numSensors ("+numSensors+")");
}
if ((this.right == null) || (this.right.length != numSensors)) this.right = new double [numSensors];
for (int i = 0; i < numSensors; i++) if (!Double.isNaN(right[i])) this.right[i] = right[i];
}
if (roll != null){
if (roll.length != numSensors){
throw new IllegalArgumentException ("roll.length ("+roll.length+") != numSensors ("+numSensors+")");
}
double rD=r*k;
if (drDistDr<minDerivative) {
bailOut=true;
break; // too high distortion
if ((this.roll == null) || (this.roll.length != numSensors)) this.roll = new double [numSensors];
for (int i = 0; i < numSensors; i++) if (!Double.isNaN(roll[i])) this.roll[i] = roll[i];
}
if (pXY0 != null){
if (pXY0.length != numSensors){
throw new IllegalArgumentException ("pXY0.length ("+pXY0.length+") != numSensors ("+numSensors+")");
}
if (Math.abs(rD-rDist)<delta) break; // success
r+=(rDist-rD)/drDistDr;
if ((this.pXY0 == null) || (this.pXY0.length != numSensors)) this.pXY0 = new double [numSensors][];
for (int i = 0; i < numSensors; i++) if (pXY0[i] != null) this.pXY0[i] = pXY0[i].clone();
}
if (bailOut) {
if (debugThis) System.out.println("calcReverseDistortionTable() i="+i+" Bailing out, drDistDr="+drDistDr);
return false;
}
rPrev=r;
this.rByRDist[i]=r/rDist;
if (debugThis) System.out.println("calcReverseDistortionTable() i="+i+" rDist="+rDist+" r="+r+" rPrev="+rPrev+" this.rByRDist[i]="+this.rByRDist[i]);
}
return true;
}
public double getRByRDist(double rDist, boolean debug){
// add exceptions;
if (this.rByRDist==null) {
if (debug)System.out.println("getRByRDist("+IJ.d2s(rDist,3)+"): this.rByRDist==null");
return Double.NaN;
/**
 * Calculate XYZ_he: sensor lens coordinates transformed to eliminate the common
 * heading and elevation (individual rolls are preserved). Also finds the camera
 * "mass center" (common_right/common_forward/common_height) as the average over
 * all sensors, and cameraRadius - the RMS in-plane (X,Y) distance of the sensors
 * from that center (Z component is ignored by design).
 * Works for any number of sensors; requires right[], forward[], height[],
 * heading and elevation to be set first.
 */
public void planeProjectLenses(){ // calculate XYZ_he (any number of sensors)
// get center of the adjusted camera
common_right = 0;
common_forward = 0;
common_height = 0;
for (int i = 0; i < numSensors; i++){
common_right += right[i];
common_forward += forward[i];
common_height += height[i];
}
common_right /= numSensors;
common_forward /= numSensors;
common_height /= numSensors;
// double [][]
this.XYZ_he = new double [numSensors][3]; // after heading, then elevation rotation
/*
rotate by phi around C2Y:Vc3= R3*Vc2
| cos(phi) 0 -sin(phi) |   |X|
| 0        1  0        | * |Y|
| sin(phi) 0  cos(phi) |   |Z|
*/
double c_head= Math.cos(heading*Math.PI/180);
double s_head= Math.sin(heading*Math.PI/180);
double [][] aR_head={{c_head,0.0,-s_head},{0.0,1.0,0.0},{s_head,0.0,c_head}};
Matrix R_head=new Matrix(aR_head);
/*
rotate by theta around C1X:Vc2= R2*Vc1
| 1 0          0           |   |X|
| 0 cos(theta) -sin(theta) | * |Y|
| 0 sin(theta) cos(theta)  |   |Z|
*/
double c_elev= Math.cos(elevation*Math.PI/180);
double s_elev= Math.sin(elevation*Math.PI/180);
double [][] aR_elev={{1.0,0.0,0.0},{0.0,c_elev, -s_elev},{0.0, s_elev, c_elev}};
Matrix R_elev=new Matrix(aR_elev);
Matrix R_head_elev = R_elev.times(R_head); // combined rotation: heading applied first, then elevation
for (int i = 0; i<numSensors; i++){
// per-sensor offset from the mass center; height is negated because camera Y axis points down
double [][] aXYZi_ccs = {
{ right[i] - common_right},
{- (height[i] - common_height)},
{ forward[i] - common_forward}};
Matrix XYZi_ccs = new Matrix(aXYZi_ccs);
Matrix mXYZ_he = R_head_elev.times(XYZi_ccs);
for (int j = 0; j<3;j++) this.XYZ_he[i][j] = mXYZ_he.get(j, 0);
}
// Calculate average radius
// (RMS of the in-plane X,Y components only - Z is deliberately excluded)
cameraRadius = 0;
for (int i = 0; i < numSensors; i++){
cameraRadius += this.XYZ_he[i][0] * this.XYZ_he[i][0] + this.XYZ_he[i][1] * this.XYZ_he[i][1];
}
cameraRadius = Math.sqrt(cameraRadius/numSensors);
}
// cameras should be Z-numbered (looking to the target, X - right, Y - down)
/**
 * Rotate the heading/elevation-aligned sensor coordinates (XYZ_he) around the Z axis
 * so that the four sensors form the "squarest" possible arrangement, producing XYZ_her
 * and the per-sensor offsets rXY normalized by disparityRadius. Quad (4-sensor)
 * cameras only. Also sets disparityRadius (sqrt(2)*cameraRadius) and common_roll
 * (degrees). Requires planeProjectLenses() to have been called first (uses XYZ_he
 * and cameraRadius).
 * NOTE: method name keeps the historical "adustSquare" spelling for API compatibility.
 */
public void adustSquare(){ // rotate heading/elevation aligned cameras around the Z-axis to make it more "square"
    if (numSensors != 4 ){
        throw new IllegalArgumentException ("adjustSquare() is valid only for quad-cameras, numSensors="+numSensors);
    }
    this.disparityRadius = Math.sqrt(2.0) * this.cameraRadius;
    double Sx = - XYZ_he[0][1] + XYZ_he[1][0] - XYZ_he[2][0] + XYZ_he[3][1];
    double Sy = - XYZ_he[0][0] - XYZ_he[1][1] + XYZ_he[2][1] + XYZ_he[3][0];
    double psi = 0.25*Math.PI - Math.atan2(Sy, Sx); // radians
    common_roll = psi*180/Math.PI;                  // exported in degrees
    /*
    Converting from the sub-camera coordinates to the target coordinates
    rotate by -psi around CZ
    | cos(psi)  sin(psi) 0 |   |Xc0|
    |-sin(psi)  cos(psi) 0 | * |Yc0|
    |    0         0     1 |   |Zc0|
    */
    // BUGFIX: psi is already in radians (atan2 result); the original code multiplied by
    // Math.PI/180 again, which applied a rotation ~57x smaller than intended.
    double c_roll= Math.cos(psi);
    double s_roll= Math.sin(psi);
    double [][] aR_roll={
        { c_roll, s_roll, 0.0},
        {-s_roll, c_roll, 0.0},
        {    0.0,    0.0, 1.0}};
    Matrix R_roll = new Matrix(aR_roll);
    this.XYZ_her = new double [numSensors][3];
    this.rXY = new double [numSensors][2]; // XY pairs of the in a normal plane, relative to disparityRadius
    for (int i = 0; i<numSensors; i++){
        double [][] aXYZi_he = {
            {this.XYZ_he[i][0]},
            {this.XYZ_he[i][1]},
            {this.XYZ_he[i][2]}};
        Matrix mXYZi_he = new Matrix(aXYZi_he);
        Matrix mXYZ_her = R_roll.times(mXYZi_he);
        for (int j = 0; j<3;j++) this.XYZ_her[i][j] = mXYZ_her.get(j, 0);
        for (int j = 0; j<2;j++) this.rXY[i][j] = this.XYZ_her[i][j]/this.disparityRadius;
    }
}
if (rDist<0) {
if (debug)System.out.println("getRByRDist("+IJ.d2s(rDist,3)+"): rDist<0");
return Double.NaN;
// return distance from disparity (in pixel units) for the current camera geometry
// NOTE(review): the units look inconsistent - focalLength (mm) * pixelSize (um) /
// disparityRadius (mm) - and distance normally scales as 1/disparity (Z = f*B/d),
// not proportionally to it. Verify the intended formula against callers before use.
public double getZFromDisparity(double disparity){
return disparity * this.focalLength * this.pixelSize / this.disparityRadius;
}
int index=(int) Math.floor(rDist/this.stepR);
if (index>=(this.rByRDist.length-1)) {
if (debug) System.out.println("getRByRDist("+IJ.d2s(rDist,3)+"): index="+index+">="+(this.rByRDist.length-1));
return Double.NaN;
/**
 * Calculate pixel coordinates for each of numSensors images, for a given (px,py) of
 * the idealized "center" (still distorted) image and generic disparity, measured in pixels.
 * @param px horizontal pixel coordinate in the virtual center camera
 * @param py vertical pixel coordinate in the virtual center camera
 * @param disparity generic disparity in pixels, distributed among sensors via rXY
 * @return [numSensors][2] array of per-sensor {x,y} distorted pixel coordinates
 */
public double [][] getPortsCoordinates(
        double px,
        double py,
        double disparity)
{
    double [][] pXY = new double [numSensors][2];
    double pXcd = px - 0.5 * this.pixelCorrectionWidth;
    double pYcd = py - 0.5 * this.pixelCorrectionHeight;
    // BUGFIX: convert the distorted radius from pixels to mm (pixelSize is in um) -
    // getRByRDist() expects its argument normalized by distortionRadius, which is in mm
    double rD = Math.sqrt(pXcd*pXcd + pYcd*pYcd)*0.001*this.pixelSize; // distorted radius in a virtual center camera, mm
    double rND2R=getRByRDist(rD/this.distortionRadius, (debugLevel > 1));
    double pXc = pXcd * rND2R; // non-distorted coordinates relative to the (0.5 * this.pixelCorrectionWidth, 0.5 * this.pixelCorrectionHeight)
    double pYc = pYcd * rND2R; // in pixels
    // polynomial coefficients of Rdist/R for ascending powers r^1..r^7 (see formula below)
    double [] a={this.distortionC,this.distortionB,this.distortionA,this.distortionA5,this.distortionA6,this.distortionA7,this.distortionA8};
    for (int i = 0; i < numSensors; i++){
        // non-distorted XY of the shifted location of the individual sensor
        double pXci = pXc + disparity * this.rXY[i][0]; // in pixels
        double pYci = pYc + disparity * this.rXY[i][1];
        // calculate back to distorted
        double rNDi = Math.sqrt(pXci*pXci + pYci*pYci); // in pixels
        // Rdist/R=A8*R^7+A7*R^6+A6*R^5+A5*R^4+A*R^3+B*R^2+C*R+(1-A8-A7-A6-A5-A-B-C)
        double ri = rNDi* 0.001 * this.pixelSize / this.distortionRadius; // relative to distortion radius (BUGFIX: um -> mm conversion was missing)
        // Equivalent form: Rdist/R = 1 + sum_j a[j]*(r^(j+1) - 1)
        double rD2rND = 1.0;
        double rri = 1.0;
        for (int j = 0; j < a.length; j++){
            rri *= ri;
            rD2rND += a[j]*(rri - 1.0); // BUGFIX: was a[j]*(rri - a[j]), inconsistent with the formula above
        }
        double pXid = pXci * rD2rND;
        double pYid = pYci * rD2rND;
        // individual rotate (check sign)
        double c_roll = Math.cos(this.roll[i] * Math.PI/180.0);
        double s_roll = Math.sin(this.roll[i] * Math.PI/180.0);
        pXY[i][0] =  c_roll * pXid + s_roll* pYid + this.pXY0[i][0];
        pXY[i][1] = -s_roll * pXid + c_roll* pYid + this.pXY0[i][1];
    }
    return pXY;
}
// Copied from PixelMapping
/**
 * Calculate the reverse distortion table this.rByRDist - mapping a distorted radius
 * rDist = i*stepR (normalized by distortionRadius) to the ratio R/Rdist, for
 * rDist in [0, maxR]. Each entry is found by Newton iteration on
 * Rdist(r) = r*(A8*r^7+A7*r^6+A6*r^5+A5*r^4+A*r^3+B*r^2+C*r+(1-A8-A7-A6-A5-A-B-C));
 * entries that were never reached remain NaN.
 * @return false if distortion is too high (derivative d(Rdist)/dr dropped below minDerivative)
 */
public boolean calcReverseDistortionTable(){
boolean debugThis=false; //true;
double delta=1E-8; // convergence tolerance for |rD - rDist|
double minDerivative=0.1; // bail out when d(Rdist)/dr falls below this (distortion too high)
int numIterations=1000;
double drDistDr=1.0; // current d(Rdist)/dr estimate; also seeds the next entry's initial guess
// public double distortionA5=0.0; //r^5 (normalized to focal length or to sensor half width?)
// public double distortionA=0.0; // r^4 (normalized to focal length or to sensor half width?)
// public double distortionB=0.0; // r^3
// public double distortionC=0.0; // r^2
boolean use8=(this.distortionA8!=0.0) || (this.distortionA7!=0.0) || (this.distortionA6!=0.0); // use full 8-term polynomial only when needed
double d=1.0-this.distortionA8-this.distortionA7-this.distortionA6-this.distortionA5-this.distortionA-this.distortionB-this.distortionC; // constant term of Rdist/R
double rPrev=0.0;
this.rByRDist=new double [(int) Math.ceil(this.maxR/this.stepR)+1];
for (int j=1;j<this.rByRDist.length;j++) this.rByRDist[j]=Double.NaN;
this.rByRDist[0]=1.0/d; // limit of R/Rdist as r -> 0
boolean bailOut=false;
if (debugThis) System.out.println("calcReverseDistortionTable()");
for (int i=1;i<this.rByRDist.length;i++) {
double rDist=this.stepR*i;
double r=rPrev+this.stepR/drDistDr; // initial guess: previous solution advanced by one step
// if (debugThis) System.out.println("calcReverseDistortionTable() i="+i+" rDist="+rDist+" r="+r+" rPrev="+rPrev);
for (int iteration=0;iteration<numIterations;iteration++){
double k; // polynomial value Rdist/R at the current guess r
if (use8){
k=(((((((this.distortionA8)*r+this.distortionA7)*r+this.distortionA6)*r+this.distortionA5)*r + this.distortionA)*r+this.distortionB)*r+this.distortionC)*r+d;
drDistDr=(((((((8*this.distortionA8)*r + 7*this.distortionA7)*r + 6*this.distortionA6)*r + 5*this.distortionA5)*r + 4*this.distortionA)*r+3*this.distortionB)*r+2*this.distortionC)*r+d; // d(r*k)/dr
} else {
k=(((this.distortionA5*r + this.distortionA)*r+this.distortionB)*r+this.distortionC)*r+d;
drDistDr=(((5*this.distortionA5*r + 4*this.distortionA)*r+3*this.distortionB)*r+2*this.distortionC)*r+d; // d(r*k)/dr
}
double rD=r*k; // distorted radius produced by the current guess
if (drDistDr<minDerivative) {
bailOut=true;
break; // too high distortion
}
if (Math.abs(rD-rDist)<delta) break; // success
r+=(rDist-rD)/drDistDr; // Newton step
}
if (bailOut) {
if (debugThis) System.out.println("calcReverseDistortionTable() i="+i+" Bailing out, drDistDr="+drDistDr);
return false;
}
rPrev=r;
this.rByRDist[i]=r/rDist; // store the ratio R/Rdist
if (debugThis) System.out.println("calcReverseDistortionTable() i="+i+" rDist="+rDist+" r="+r+" rPrev="+rPrev+" this.rByRDist[i]="+this.rByRDist[i]);
}
return true;
}
double result=this.rByRDist[index]+(this.rByRDist[index+1]-this.rByRDist[index])*(rDist/this.stepR-index);
if (Double.isNaN(result)){
if (debug) System.out.println("this.rByRDist["+index+"]="+this.rByRDist[index]);
if (debug) System.out.println("this.rByRDist["+(index+1)+"]="+this.rByRDist[index+1]);
if (debug) System.out.println("rDist="+rDist);
if (debug) System.out.println("(rDist/this.stepR="+(rDist/this.stepR));
/**
 * Look up the non-distorted/distorted radius ratio R/Rdist for a given distorted
 * radius (normalized by distortionRadius), linearly interpolating in the
 * precomputed rByRDist table. The table is built lazily on first use.
 * @param rDist distorted radius, normalized by distortionRadius
 * @param debug print diagnostics for table-miss / NaN cases
 * @return interpolated R/Rdist; NaN when rDist is negative or beyond the table range
 */
public double getRByRDist(double rDist, boolean debug){
    // add exceptions;
    if (this.rByRDist == null) {
        calcReverseDistortionTable(); // build the lookup table on first access
        if (debug) System.out.println("getRByRDist("+IJ.d2s(rDist,3)+"): this.rByRDist==null");
        // return Double.NaN;
    }
    if (rDist < 0) {
        if (debug) System.out.println("getRByRDist("+IJ.d2s(rDist,3)+"): rDist<0");
        return Double.NaN;
    }
    int binIndex = (int) Math.floor(rDist/this.stepR);
    int lastValid = this.rByRDist.length - 1;
    if (binIndex >= lastValid) {
        if (debug) System.out.println("getRByRDist("+IJ.d2s(rDist,3)+"): index="+binIndex+">="+lastValid);
        return Double.NaN;
    }
    // linear interpolation between the two surrounding table entries
    double lo = this.rByRDist[binIndex];
    double hi = this.rByRDist[binIndex + 1];
    double interpolated = lo + (hi - lo) * (rDist/this.stepR - binIndex);
    if (Double.isNaN(interpolated)){
        if (debug) System.out.println("this.rByRDist["+binIndex+"]="+lo);
        if (debug) System.out.println("this.rByRDist["+(binIndex+1)+"]="+hi);
        if (debug) System.out.println("rDist="+rDist);
        if (debug) System.out.println("(rDist/this.stepR="+(rDist/this.stepR));
    }
    return interpolated;
}
return result;
}
......
......@@ -701,8 +701,10 @@ public class ImageDtt {
chn=nTile/nTilesInChn;
tileY =(nTile % nTilesInChn)/tilesX;
tileX = nTile % tilesX;
centerX = tileX * transform_size - transform_size/2 - shiftX;
centerY = tileY * transform_size - transform_size/2 - shiftY;
// centerX = tileX * transform_size - transform_size/2 - shiftX;
// centerY = tileY * transform_size - transform_size/2 - shiftY;
centerX = tileX * transform_size + transform_size/2 - shiftX;
centerY = tileY * transform_size + transform_size/2 - shiftY;
double [] fract_shiftXY = extract_correct_tile( // return a pair of resudual offsets
imade_data,
......@@ -715,10 +717,10 @@ public class ImageDtt {
chn,
centerX, // center of aberration-corrected (common model) tile, X
centerY, //
(globalDebugLevel > -1) && (tileX == debug_tileX) && (tileY == debug_tileY) && (chn == 2), // external tile compare
(globalDebugLevel > 0) && (tileX == debug_tileX) && (tileY == debug_tileY) && (chn == 2), // external tile compare
no_deconvolution,
transpose);
if ((globalDebugLevel > -1) && (debug_tileX == tileX) && (debug_tileY == tileY) && (chn == 2)) {
if ((globalDebugLevel > 0) && (debug_tileX == tileX) && (debug_tileY == tileY) && (chn == 2)) {
showDoubleFloatArrays sdfa_instance = new showDoubleFloatArrays(); // just for debugging?
String [] titles = {"CC","SC","CS","SS"};
sdfa_instance.showArrays(clt_data[chn][tileY][tileX], transform_size, transform_size, true, "pre-shifted_x"+tileX+"_y"+tileY, titles);
......@@ -740,7 +742,7 @@ public class ImageDtt {
// (globalDebugLevel > 0) && (tileX == debug_tileX) && (tileY == debug_tileY)); // external tile compare
((globalDebugLevel > 0) && (chn==0) && (tileX >= debug_tileX - 2) && (tileX <= debug_tileX + 2) &&
(tileY >= debug_tileY - 2) && (tileY <= debug_tileY+2)));
if ((globalDebugLevel > -1) && (debug_tileX == tileX) && (debug_tileY == tileY)) {
if ((globalDebugLevel > 0) && (debug_tileX == tileX) && (debug_tileY == tileY)) {
showDoubleFloatArrays sdfa_instance = new showDoubleFloatArrays(); // just for debugging?
String [] titles = {"CC","SC","CS","SS"};
sdfa_instance.showArrays(clt_data[chn][tileY][tileX], transform_size, transform_size, true, "shifted_x"+tileX+"_y"+tileY, titles);
......@@ -863,10 +865,12 @@ public class ImageDtt {
final int tilesY=dct_data.length;
final int tilesX=dct_data[0].length;
final int width= (tilesX+1)*dct_size;
final int height= (tilesY+1)*dct_size;
// final int width= (tilesX+1)*dct_size;
// final int height= (tilesY+1)*dct_size;
final int width= tilesX * dct_size;
final int height= tilesY * dct_size;
final double debug_scale = 1.0 /((debug_mask & 1) + ((debug_mask >> 1) & 1) + ((debug_mask >> 2) & 1) + ((debug_mask >> 3) & 1));
if (globalDebugLevel > 0) {
if (globalDebugLevel > -1) {
System.out.println("iclt_2d():tilesX= "+tilesX);
System.out.println("iclt_2d():tilesY= "+tilesY);
System.out.println("iclt_2d():width= "+width);
......@@ -903,6 +907,10 @@ public class ImageDtt {
double [] tile_mdct;
int tileY,tileX;
int n2 = dct_size * 2;
int n_half = dct_size / 2;
int lastY = tilesY-1;
int lastX = tilesX-1;
int offset = n_half * (dct_size * tilesX) + n_half;
for (int nTile = ai.getAndIncrement(); nTile < tiles_list[nser.get()].length; nTile = ai.getAndIncrement()) {
tileX = tiles_list[nser.get()][nTile][0];
tileY = tiles_list[nser.get()][nTile][1];
......@@ -916,11 +924,30 @@ public class ImageDtt {
tile_dct = dtt.dttt_iv (tile_in, idct_mode, dct_size);
}
tile_mdct = dtt.unfold_tile(tile_dct, dct_size, dct_mode); // mode=0 - DCCT
for (int i = 0; i < n2;i++){
int start_line = ((tileY*dct_size + i) *(tilesX+1) + tileX)*dct_size;
for (int j = 0; j<n2;j++) {
dpixels[start_line + j] += debug_scale * tile_mdct[n2 * i + j]; // add (cc+sc+cs+ss)/4
if ((tileY >0) && (tileX > 0) && (tileY < lastY) && (tileX < lastX)) { // fast, no extra checks
for (int i = 0; i < n2;i++){
// int start_line = ((tileY*dct_size + i) *(tilesX+1) + tileX)*dct_size;
int start_line = ((tileY*dct_size + i) * tilesX + tileX)*dct_size - offset;
for (int j = 0; j<n2;j++) {
dpixels[start_line + j] += debug_scale * tile_mdct[n2 * i + j]; // add (cc+sc+cs+ss)/4
}
}
} else { // be careful with margins
for (int i = 0; i < n2;i++){
if ( ((tileY > 0) && (tileY < lastY)) ||
((tileY == 0) && (i >= n_half)) ||
((tileY == lastY) && (i < (n2 - n_half)))) {
int start_line = ((tileY*dct_size + i) * tilesX + tileX)*dct_size - offset;
for (int j = 0; j<n2;j++) {
if ( ((tileX > 0) && (tileX < lastX)) ||
((tileX == 0) && (j >= n_half)) ||
((tileX == lastX) && (j < (n2 - n_half)))) {
dpixels[start_line + j] += debug_scale * tile_mdct[n2 * i + j]; // add (cc+sc+cs+ss)/4
}
}
}
}
}
}
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment