Commit 61e720a6 authored by Andrey Filippov's avatar Andrey Filippov

debugging/testing interscene accumulation, before LOG

parent a6dac489
......@@ -233,18 +233,22 @@ public class EyesisCorrections {
public ImagePlus getJp4Tiff(
String path,
int [] woi_tops) {
return getJp4Tiff(path, false, woi_tops);
int [] woi_tops,
int [] camera_heights
) {
return getJp4Tiff(path, false, woi_tops, camera_heights);
}
public ImagePlus getJp4Tiff(
String path) {
return getJp4Tiff(path, false, null);
return getJp4Tiff(path, false, null, null);
}
public ImagePlus getJp4Tiff(
String path,
boolean ignore_alien, // open image even if it does not belong to the current camera
int [] woi_tops) {
int [] woi_tops,
int [] camera_heights
) {
// get source file channel
int src_channel = correctionsParameters.getChannelFromSourceTiff(path);
int sub_camera = src_channel - correctionsParameters.firstSubCamera;
......@@ -279,6 +283,9 @@ public class EyesisCorrections {
if (imp.getProperty("WOI_TOP") != null)
woi_tops[sensor_number] = Integer.parseInt((String) imp.getProperty("WOI_TOP"));
}
if ((camera_heights != null) && (sensor_number< camera_heights.length)) { // actually acquired height (not padded)
camera_heights[sensor_number] = imp.getHeight();
}
} else if (!ignore_alien) {
return null;
}
......
......@@ -703,6 +703,7 @@ private Panel panel1,
addButton("Inter Pairs", panelClt5, color_process);
addButton("Inter LMA", panelClt5, color_stop);
addButton("Inter Series", panelClt5, color_process);
addButton("Inter Accumulate", panelClt5, color_process);
plugInFrame.add(panelClt5);
}
......@@ -5124,6 +5125,14 @@ private Panel panel1,
CLT_PARAMETERS.batch_run = true;
interSeriesLMA();
return;
/* ======================================================================== */
} else if (label.equals("Inter Accumulate")) {
DEBUG_LEVEL=MASTER_DEBUG_LEVEL;
EYESIS_CORRECTIONS.setDebug(DEBUG_LEVEL);
CLT_PARAMETERS.batch_run = true;
intersceneAccumulate();
return;
/* ======================================================================== */
} else if (label.equals("Inter LMA")) {
......@@ -5646,7 +5655,7 @@ private Panel panel1,
if (DEBUG_LEVEL > -2){
System.out.println("++++++++++++++ Running initSensorFiles for the main camera ++++++++++++++");
}
EYESIS_CORRECTIONS.initSensorFiles(
EYESIS_CORRECTIONS.initSensorFiles( // long
DEBUG_LEVEL+2,
false, // true,
true, // false,
......@@ -6716,6 +6725,73 @@ private Panel panel1,
return true;
}
/**
 * Menu handler for the "Inter Accumulate" button: prepares rig images and
 * configuration, lazily initializes the GPU tile processor and per-camera
 * GpuQuad instance when GPU mode is enabled, then delegates to
 * TWO_QUAD_CLT.intersceneAccumulate() to test interscene accumulation.
 * Saves timestamped properties on completion.
 *
 * @return true on success, false if image preparation, configuration,
 *         or GPU initialization fails (or the user aborts the config dialog)
 */
public boolean intersceneAccumulate() {
	long startTime=System.nanoTime();
	// load needed sensor and kernels files
	if (!prepareRigImages()) return false;
	String configPath=getSaveCongigPath();
	if (configPath.equals("ABORT")) return false;
	setAllProperties(PROPERTIES); // batchRig may save properties with the model. Extrinsics will be updated, others should be set here
	if (DEBUG_LEVEL > -2){
		System.out.println("++++++++++++++ Testing Interscene processing ++++++++++++++");
	}
	if (CLT_PARAMETERS.useGPU()) { // only init GPU instances if it is used
		if (GPU_TILE_PROCESSOR == null) {
			try {
				GPU_TILE_PROCESSOR = new GPUTileProcessor(CORRECTION_PARAMETERS.tile_processor_gpu);
			} catch (Exception e) {
				System.out.println("Failed to initialize GPU class");
				e.printStackTrace();
				return false;
			}
		}
		if (CLT_PARAMETERS.useGPU(false) && (QUAD_CLT != null) && (GPU_QUAD == null)) { // if GPU main is needed
			try {
				GPU_QUAD = GPU_TILE_PROCESSOR.new GpuQuad(
						QUAD_CLT,
						4,   // number of cameras
						3);  // number of colors
			} catch (Exception e) {
				System.out.println("Failed to initialize GpuQuad class");
				e.printStackTrace();
				return false;
			}
			QUAD_CLT.setGPU(GPU_QUAD);
		}
	}
	try {
		TWO_QUAD_CLT.intersceneAccumulate(
				QUAD_CLT,                 // QuadCLT quadCLT_main,
				CLT_PARAMETERS,           // EyesisCorrectionParameters.DCTParameters dct_parameters,
				DEBAYER_PARAMETERS,       // EyesisCorrectionParameters.DebayerParameters debayerParameters,
				COLOR_PROC_PARAMETERS,    // EyesisCorrectionParameters.ColorProcParameters colorProcParameters,
				CHANNEL_GAINS_PARAMETERS, // CorrectionColorProc.ColorGainsParameters channelGainParameters,
				RGB_PARAMETERS,           // EyesisCorrectionParameters.RGBParameters rgbParameters,
				EQUIRECTANGULAR_PARAMETERS, // EyesisCorrectionParameters.EquirectangularParameters equirectangularParameters,
				PROPERTIES,               // Properties properties,
				THREADS_MAX,              // final int threadsMax, // maximal number of threads to launch
				UPDATE_STATUS,            // final boolean updateStatus,
				DEBUG_LEVEL);
	} catch (Exception e) {
		// NOTE(review): exception is logged but processing continues to save
		// the config and return true — preserved from the original flow.
		e.printStackTrace();
	}
	if (configPath!=null) {
		saveTimestampedProperties( // save config again
				configPath, // full path or null
				null,       // use as default directory if path==null
				true,
				PROPERTIES);
	}
	// FIX: message previously said "batchRig()" (copy-paste from batchRig handler)
	System.out.println("intersceneAccumulate(): Processing finished at "+
			IJ.d2s(0.000000001*(System.nanoTime()-startTime),3)+" sec, --- Free memory="+
			Runtime.getRuntime().freeMemory()+" (of "+Runtime.getRuntime().totalMemory()+")");
	return true;
}
......
......@@ -63,6 +63,7 @@ import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import java.util.concurrent.CopyOnWriteArrayList;
......@@ -2718,19 +2719,33 @@ public class GPUTileProcessor {
* by the caller. They are calculated by recalculating from the reference scene after appropriate transformation (shift, rotation
* and ERS correction)
* @param pXpYD Array of per-tile pX, pY and disparity triplets (or nulls for undefined tiles).
* @param task_code Put this value (typically 512?) for each tile in task field.
* @param geometryCorrection GeometryCorrection instance for the camera.
* @param disparity_corr Disparity correction at infinity
* @param margin Skip tile if at least one channel tile center is closer to the image edge than this margin.
* @param valid_tiles Optional (if not null) should be initialized as boolean [tiles] - will contain valid tiles
* @param threadsMax Maximal number of threads to run concurrently.
* @return Array of TpTask instances (fully prepared) to be fed to the GPU
*/
public TpTask[] setInterTasks(
double [][] pXpYD, // per-tile array of pX,pY,disparity triplets (or nulls)
int task_code, // code to use for active tiles
final double [][] pXpYD, // per-tile array of pX,pY,disparity triplets (or nulls)
final GeometryCorrection geometryCorrection,
final double disparity_corr,
final int margin, // do not use tiles if their centers are closer to the edges
final boolean [] valid_tiles,
final int threadsMax) // maximal number of threads to launch
{
final int task_code = ((1 << NUM_PAIRS)-1) << TASK_CORR_BITS; // correlation only
final double min_px = margin;
final double max_px = img_width - 1 - margin;
final double [] min_py = new double[num_cams] ;
final double [] max_py = new double[num_cams] ;
for (int i = 0; i < num_cams; i++) {
min_py [i] = margin + geometryCorrection.getWOITops()[i];
max_py [i] = geometryCorrection.getWOITops()[i] + geometryCorrection.getCameraHeights()[i] - 1 - margin;
}
if (valid_tiles!=null) {
Arrays.fill(valid_tiles, false);
}
final int tilesX = img_width / DTT_SIZE;
final int tiles = pXpYD.length;
final Matrix [] corr_rots = geometryCorrection.getCorrVector().getRotMatrices(); // get array of per-sensor rotation matrices
......@@ -2738,33 +2753,23 @@ public class GPUTileProcessor {
final Thread[] threads = ImageDtt.newThreadArray(threadsMax);
final AtomicInteger ai = new AtomicInteger(0);
final AtomicInteger aTiles = new AtomicInteger(0);
final int [] tile_indices = new int [tiles];
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nTile = ai.getAndIncrement(); nTile < tiles; nTile = ai.getAndIncrement()) if (pXpYD[nTile] != null) {
tile_indices[aTiles.getAndIncrement()] = nTile;
}
}
};
}
ImageDtt.startAndJoin(threads);
ai.set(0);
final TpTask[] tp_tasks = new TpTask[aTiles.get()];
final TpTask[] tp_tasks = new TpTask[tiles]; // aTiles.get()];
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
@Override
public void run() {
for (int indx = ai.getAndIncrement(); indx < tp_tasks.length; indx = ai.getAndIncrement()) {
int nTile = tile_indices[indx];
// for (int indx = ai.getAndIncrement(); indx < tp_tasks.length; indx = ai.getAndIncrement()) {
// int nTile = tile_indices[indx];
for (int nTile = ai.getAndIncrement(); nTile < tiles; nTile = ai.getAndIncrement()) if (pXpYD[nTile] != null) {
TpTask tp_task = new TpTask();
int tileY = nTile / tilesX;
int tileX = nTile % tilesX;
tp_tasks[nTile].ty = tileY;
tp_tasks[nTile].tx = tileX;
tp_tasks[nTile].task = task_code;
tp_task.ty = tileY;
tp_task.tx = tileX;
tp_task.task = task_code;
double disparity = pXpYD[nTile][2] + disparity_corr;
tp_tasks[nTile].target_disparity = (float) disparity; // will it be used?
tp_task.target_disparity = (float) disparity; // will it be used?
double [][] disp_dist_main = new double[quad_main][]; // used to correct 3D correlations (not yet used here)
double [][] centersXY_main = geometryCorrection.getPortsCoordinatesAndDerivatives(
geometryCorrection, // GeometryCorrection gc_main,
......@@ -2776,17 +2781,32 @@ public class GPUTileProcessor {
pXpYD[nTile][0],
pXpYD[nTile][1],
disparity); // + disparity_corr);
tp_tasks[nTile].xy = new float [centersXY_main.length][2];
tp_task.xy = new float [centersXY_main.length][2];
boolean bad_margins = false;
for (int i = 0; i < centersXY_main.length; i++) {
tp_tasks[nTile].xy[i][0] = (float) centersXY_main[i][0];
tp_tasks[nTile].xy[i][1] = (float) centersXY_main[i][1];
if ( (centersXY_main[i][0] < min_px) || (centersXY_main[i][0] > max_px) ||
(centersXY_main[i][1] < min_py[i]) || (centersXY_main[i][1] > max_py[i])) {
bad_margins = true;
break;
}
tp_task.xy[i][0] = (float) centersXY_main[i][0];
tp_task.xy[i][1] = (float) centersXY_main[i][1];
}
if (bad_margins) {
continue;
}
tp_tasks[aTiles.getAndIncrement()] = tp_task;
if (valid_tiles!=null) {
valid_tiles[nTile] = true;
}
}
}
};
}
ImageDtt.startAndJoin(threads);
return tp_tasks;
final TpTask[] tp_tasks_out = new TpTask[aTiles.get()];
System.arraycopy(tp_tasks, 0, tp_tasks_out, 0, tp_tasks_out.length);
return tp_tasks_out;
}
......
......@@ -357,6 +357,7 @@ public class ErsCorrection extends GeometryCorrection {
for (String ts:scenes_poses.keySet()) {
scenes[i++] = ts;
}
Arrays.sort(scenes);
return scenes;
}
......@@ -678,6 +679,7 @@ public class ErsCorrection extends GeometryCorrection {
extrinsic_corr = gc.extrinsic_corr; // ;
rigOffset = gc.rigOffset; // = null;
woi_tops = gc.woi_tops; // = null; // used to calculate scanline timing
camera_heights = gc.camera_heights; // = null; // used to calculate scanline timing
if (deep) {
forward = clone1d(forward);
right = clone1d(right);
......@@ -692,6 +694,7 @@ public class ErsCorrection extends GeometryCorrection {
extrinsic_corr = extrinsic_corr.clone();
if (rigOffset!=null) rigOffset = rigOffset.clone();
woi_tops = clone1d(woi_tops);
camera_heights = clone1d(camera_heights);
}
resetScenes(); // no scenes yet
// generate initial ers velocity and roll
......
......@@ -109,8 +109,9 @@ public class GeometryCorrection {
public CorrVector extrinsic_corr;
public RigOffset rigOffset = null;
public int [] woi_tops = null; // used to calculate scanline timing
public RigOffset rigOffset = null;
public int [] woi_tops = null; // used to calculate scanline timing
public int [] camera_heights = null; // actual acquired lines (from woi_tops)
public float [] toFloatArray() { // for GPU comparison
......@@ -155,6 +156,7 @@ public class GeometryCorrection {
(float) cameraRadius, // average distance from the "mass center" of the sensors to the sensors
(float) disparityRadius, //=150.0; // distance between cameras to normalize disparity units to. sqrt(2)*disparityRadius for quad
woi_tops[0],woi_tops[1],woi_tops[2],woi_tops[3]
// TODO: ADD camera_heights[0], camera_heights[1], camera_heights[2], camera_heights[3],
};
}
public static int arrayLength(int ncam) {
......@@ -199,13 +201,18 @@ public class GeometryCorrection {
cameraRadius, // average distance from the "mass center" of the sensors to the sensors
disparityRadius, //=150.0; // distance between cameras to normalize disparity units to. sqrt(2)*disparityRadius for quad
woi_tops[0],woi_tops[1],woi_tops[2],woi_tops[3]
// TODO: ADD camera_heights[0], camera_heights[1], camera_heights[2], camera_heights[3],
};
}
/**
 * @return per-sensor WOI (window of interest) top rows; used to calculate
 *         scanline timing (see woi_tops field declaration).
 */
public int [] getWOITops() {// not used in lwir
return woi_tops;
}
/**
 * @return per-sensor actually acquired image heights in lines (not padded),
 *         counted from the corresponding woi_tops entry.
 */
public int [] getCameraHeights() {
return camera_heights;
}
/**
 * @return per-sensor principal point coordinates pXY0 ([sensor][x,y]).
 */
public double [][] getPXY0(){
return this.pXY0;
}
......@@ -349,6 +356,7 @@ public class GeometryCorrection {
pXY0 = new double [numSensors][2];
rXY = new double [numSensors][2];
woi_tops = new int [numSensors];
camera_heights = new int [numSensors];
resetCorrVector();
}
......
......@@ -24,6 +24,7 @@ package com.elphel.imagej.tileprocessor;
*/
// ← → ↑ ↓ ⇖ ⇗ ⇘ ⇙ ↔ ↕
import java.util.Arrays;
import java.util.concurrent.atomic.AtomicInteger;
import com.elphel.imagej.cameras.CLTParameters;
......@@ -125,6 +126,10 @@ public class ImageDttCPU {
"g0","g1","g2","g3",
"b0","b1","b2","b3",
};
// Slice titles for displaying 2D correlation debug images:
// 6 directional pair correlations, 4 combined correlations, and "s-"
// prefixed variants (presumably smoothed/summed combos — TODO confirm
// against the code that indexes this array).
static public String[] CORR_TITLES = {
"top","bottom","left","right","diag-m","diag-o",
"quad","cross","hor","vert",
"s-hor","s-vert","s-quad","s-cross","s-quad-cross","s-combo"};
// "dbg0","dbg1","dbg2","dbg3","dbg4","dbg5","dbg6","dbg7","dbg8","dbg9","dbg10","dbg11","dbg12","dbg13","dbg14","dbg15","dbg16","dbg17","dbg18"};
static int BI_DISP_FULL_INDEX = 0; // 0 - disparity for all directions of the main camera
......@@ -5880,10 +5885,81 @@ public class ImageDttCPU {
}
// final float [][][][] fcorr_td = new float[tilesY][tilesX][][];
// final float [][][][] fcorr_combo_td = new float[4][tilesY][tilesX][];
/**
 * Repackage transform-domain correlation tiles into per-slice linescan images
 * for debug visualization. Runs multithreaded over tiles.
 *
 * @param fcorr_td       correlation data; layout depends on num_slices:
 *                       if num_slices == 0: [slice][tilesY][tilesX][] (combo layout),
 *                       if num_slices  > 0: [tilesY][tilesX][num_slices][] (per-tile layout)
 * @param num_slices     0 selects the combo layout, otherwise number of slices per tile
 * @param transform_size side of the DTT transform; each tile renders as a
 *                       (2*transform_size) x (2*transform_size) square
 * @param wh             optional int[2], receives {width, height} of the output images
 * @param threadsMax     maximal number of threads to launch
 * @return per-slice images [slices][width*height], filled with NaN where no tile data
 */
public static float [][] corr_td_dbg(
		final float [][][][] fcorr_td,
		final int num_slices,
		final int transform_size,
		final int [] wh, // should be initialized as int[2];
		final int threadsMax) // maximal number of threads to launch
{
	final int tilesY = (num_slices == 0) ? fcorr_td[0].length : fcorr_td.length;
	final int tilesX = (num_slices == 0) ? fcorr_td[0][0].length : fcorr_td[0].length;
	final int nTiles = tilesX * tilesY;
	final int width  = tilesX * 2 * transform_size;
	final int height = tilesY * 2 * transform_size;
	if (wh != null) {
		wh[0] = width;
		wh[1] = height;
	}
	final int fnum_slices = (num_slices == 0) ? fcorr_td.length : num_slices;
	final int transform_len = transform_size * transform_size; // 64 for transform_size == 8
	// final: captured by the anonymous Thread subclass below
	final float [][] dbg_img = new float [fnum_slices][width * height];
	for (int i = 0; i < dbg_img.length; i++) {
		Arrays.fill(dbg_img[i], Float.NaN); // NaN marks empty tiles in the rendered image
	}
	final Thread[] threads = newThreadArray(threadsMax);
	final AtomicInteger ai = new AtomicInteger(0);
	for (int ithread = 0; ithread < threads.length; ithread++) {
		threads[ithread] = new Thread() {
			@Override
			public void run() {
				for (int nTile = ai.getAndIncrement(); nTile < nTiles; nTile = ai.getAndIncrement()) {
					int tileY = nTile / tilesX;
					int tileX = nTile - tileY * tilesX;
					if ((num_slices == 0) || (fcorr_td[tileY][tileX] != null)) {
						for (int slice = 0; slice < fnum_slices; slice++) {
							float [] ftile = (num_slices > 0) ? fcorr_td[tileY][tileX][slice] : fcorr_td[slice][tileY][tileX];
							if (ftile != null) {
								// Each tile holds a 2x2 grid of quadrants, each
								// transform_size x transform_size, stored consecutively;
								// copy them row-by-row into the linescan output.
								for (int qy = 0; qy < 2; qy++) {
									for (int qx = 0; qx < 2; qx++) {
										for (int ty = 0; ty < transform_size; ty++) {
											int py = (tileY * 2 + qy) * transform_size + ty;
											int px = (tileX * 2 + qx) * transform_size;
											System.arraycopy(
													ftile,
													(2 * qy + qx) * transform_len + transform_size * ty,
													dbg_img[slice],
													py * width + px,
													transform_size);
										}
									}
								}
							}
						}
					}
				}
			}
		};
	}
	startAndJoin(threads);
	return dbg_img;
}
// extract correlation result in linescan order (for visualization)
public double [][] corr_partial_dbg( // not used in lwir
public static double [][] corr_partial_dbg( // not used in lwir
final double [][][][][] corr_data,
final int corr_size,
final int pairs,
......
......@@ -1935,7 +1935,7 @@ public class QuadCLTCPU {
// int srcChannel=correctionsParameters.getChannelFromSourceTiff(sourceFiles[nFile]);
int srcChannel=fileIndices[iImage][1];
imp_src = eyesisCorrections.getJp4Tiff(sourceFiles[nFile], this.geometryCorrection.woi_tops);
imp_src = eyesisCorrections.getJp4Tiff(sourceFiles[nFile], this.geometryCorrection.woi_tops, this.geometryCorrection.camera_heights);
double scaleExposure=1.0;
if (!Double.isNaN(referenceExposures[nFile]) && (imp_src.getProperty("EXPOSURE")!=null)){
......@@ -2509,7 +2509,7 @@ public class QuadCLTCPU {
int nFile=channelFiles[srcChannel];
imp_srcs[srcChannel]=null;
if (nFile >=0){
imp_srcs[srcChannel] = eyesisCorrections.getJp4Tiff(sourceFiles[nFile], this.geometryCorrection.woi_tops);
imp_srcs[srcChannel] = eyesisCorrections.getJp4Tiff(sourceFiles[nFile], this.geometryCorrection.woi_tops, this.geometryCorrection.camera_heights);
scaleExposure[srcChannel] = 1.0;
if (!Double.isNaN(referenceExposures[nFile]) && (imp_srcs[srcChannel].getProperty("EXPOSURE")!=null)){
......@@ -3117,7 +3117,7 @@ public class QuadCLTCPU {
int nFile=channelFiles[srcChannel];
imp_srcs[srcChannel]=null;
if (nFile >=0){
imp_srcs[srcChannel] = eyesisCorrections.getJp4Tiff(sourceFiles[nFile], this.geometryCorrection.woi_tops);
imp_srcs[srcChannel] = eyesisCorrections.getJp4Tiff(sourceFiles[nFile], this.geometryCorrection.woi_tops, this.geometryCorrection.camera_heights);
scaleExposures[srcChannel] = 1.0;
if (!Double.isNaN(referenceExposures[nFile]) && (imp_srcs[srcChannel].getProperty("EXPOSURE")!=null)){
......@@ -3730,6 +3730,8 @@ public class QuadCLTCPU {
this.image_name = set_name;
ImagePlus [] imp_srcs = new ImagePlus[channelFiles.length];
this.geometryCorrection.woi_tops = new int [channelFiles.length];
this.geometryCorrection.camera_heights = new int [channelFiles.length];
double [][] dbg_dpixels = new double [channelFiles.length][];
boolean is_lwir = colorProcParameters.lwir_islwir;
boolean ignore_saturation = is_lwir;
......@@ -3742,7 +3744,7 @@ public class QuadCLTCPU {
int nFile=channelFiles[srcChannel]; // channelFiles[srcChannel];
imp_srcs[srcChannel]=null;
if (nFile >=0){
imp_srcs[srcChannel] = eyesisCorrections.getJp4Tiff(sourceFiles[nFile], this.geometryCorrection.woi_tops);
imp_srcs[srcChannel] = eyesisCorrections.getJp4Tiff(sourceFiles[nFile], this.geometryCorrection.woi_tops, this.geometryCorrection.camera_heights);
scaleExposures[srcChannel] = 1.0;
if (!(referenceExposures == null) && !Double.isNaN(referenceExposures[nFile]) && (imp_srcs[srcChannel].getProperty("EXPOSURE")!=null)){
......@@ -6424,14 +6426,15 @@ public class QuadCLTCPU {
}
ImagePlus [] imp_srcs = new ImagePlus[channelFiles.length];
this.geometryCorrection.woi_tops = new int [channelFiles.length];
this.geometryCorrection.woi_tops = new int [channelFiles.length];
this.geometryCorrection.camera_heights = new int [channelFiles.length];
boolean [][] saturation_imp = (clt_parameters.sat_level > 0.0)? new boolean[channelFiles.length][] : null;
double [] scaleExposures = new double[channelFiles.length];
for (int srcChannel=0; srcChannel<channelFiles.length; srcChannel++){
int nFile=channelFiles[srcChannel];
imp_srcs[srcChannel]=null;
if (nFile >=0){
imp_srcs[srcChannel] = eyesisCorrections.getJp4Tiff(sourceFiles[nFile], this.geometryCorrection.woi_tops);
imp_srcs[srcChannel] = eyesisCorrections.getJp4Tiff(sourceFiles[nFile], this.geometryCorrection.woi_tops, this.geometryCorrection.camera_heights);
scaleExposures[srcChannel] = 1.0;
if (!Double.isNaN(referenceExposures[nFile]) && (imp_srcs[srcChannel].getProperty("EXPOSURE")!=null)){
......@@ -11583,13 +11586,15 @@ public class QuadCLTCPU {
ImagePlus [] imp_srcs = new ImagePlus[channelFiles.length];
this.geometryCorrection.woi_tops = new int [channelFiles.length];
this.geometryCorrection.camera_heights = new int [channelFiles.length];
double [][] dbg_dpixels = batch_mode? null : (new double [channelFiles.length][]);
for (int srcChannel=0; srcChannel<channelFiles.length; srcChannel++){
int nFile=channelFiles[srcChannel];
imp_srcs[srcChannel]=null;
if (nFile >=0){
imp_srcs[srcChannel] = eyesisCorrections.getJp4Tiff(sourceFiles[nFile], this.geometryCorrection.woi_tops);
imp_srcs[srcChannel] = eyesisCorrections.getJp4Tiff(sourceFiles[nFile], this.geometryCorrection.woi_tops, this.geometryCorrection.camera_heights);
scaleExposures[srcChannel] = 1.0;
if (!Double.isNaN(referenceExposures[nFile]) && (imp_srcs[srcChannel].getProperty("EXPOSURE")!=null)){
......
......@@ -38,6 +38,7 @@ import java.nio.channels.WritableByteChannel;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Properties;
import java.util.Random;
......@@ -8523,17 +8524,12 @@ if (debugLevel > -100) return true; // temporarily !
threadsMax,
debugLevel);
// temporarily fix wrong sign:
ErsCorrection ers = (ErsCorrection) (quadCLTs[i].getGeometryCorrection());
// if (reset_from_extrinsics) {
// System.out.println("Reset ERS parameters from intraframe extrinsics");
// ers.setupERSfromExtrinsics();
// }
// ErsCorrection ers = (ErsCorrection) (quadCLTs[i].getGeometryCorrection());
quadCLTs[i].setDSRBG(
clt_parameters, // CLTParameters clt_parameters,
threadsMax, // int threadsMax, // maximal number of threads to launch
updateStatus, // boolean updateStatus,
debugLevel); // int debugLevel)
/// quadCLTs[i].showDSIMain();
}
......@@ -8547,8 +8543,61 @@ if (debugLevel > -100) return true; // temporarily !
quadCLTs, // QuadCLT [] scenes, // ordered by increasing timestamps
clt_parameters.ofp.debug_level_optical); // 1); // -1); // int debug_level);
System.out.println("End of interSeriesLMA()");
}
/**
 * Test interscene accumulation: resets the GPU geometry correction (if a GPU
 * instance is attached to the main QuadCLT), spawns a reference QuadCLT from
 * the last available scene set, computes its DSRBG data on the GPU, and runs
 * OpticalFlow.IntersceneAccumulate() against that reference scene.
 *
 * @param quadCLT_main      main camera QuadCLT; tiles should be set
 * @param clt_parameters    CLT processing parameters
 * @param debayerParameters debayer parameters (passed through, unused here)
 * @param colorProcParameters color processing parameters
 * @param channelGainParameters channel gain parameters (passed through, unused here)
 * @param rgbParameters     RGB conversion parameters (passed through, unused here)
 * @param equirectangularParameters equirectangular parameters (passed through, unused here)
 * @param properties        configuration properties (passed through, unused here)
 * @param threadsMax        maximal number of threads to launch
 * @param updateStatus      update ImageJ status line during processing
 * @param debugLevel        debug verbosity
 * @throws Exception propagated from downstream processing
 */
public void intersceneAccumulate(
		QuadCLT quadCLT_main, // tiles should be set
		CLTParameters clt_parameters,
		EyesisCorrectionParameters.DebayerParameters debayerParameters,
		ColorProcParameters colorProcParameters,
		CorrectionColorProc.ColorGainsParameters channelGainParameters,
		EyesisCorrectionParameters.RGBParameters rgbParameters,
		EyesisCorrectionParameters.EquirectangularParameters equirectangularParameters,
		Properties properties,
		final int threadsMax, // maximal number of threads to launch
		final boolean updateStatus,
		final int debugLevel) throws Exception
{
	if ((quadCLT_main != null) && (quadCLT_main.getGPU() != null)) {
		// drop any stale geometry/correction vector cached on the GPU side
		quadCLT_main.getGPU().resetGeometryCorrection();
		quadCLT_main.gpuResetCorrVector(); // .getGPU().resetGeometryCorrectionVector();
	}
	// final boolean batch_mode = clt_parameters.batch_run;
	this.startTime=System.nanoTime();
	String [] sourceFiles0=quadCLT_main.correctionsParameters.getSourcePaths();
	QuadCLT.SetChannels [] set_channels_main = quadCLT_main.setChannels(debugLevel);
	if ((set_channels_main == null) || (set_channels_main.length==0)) {
		System.out.println("No files to process (of "+sourceFiles0.length+")");
		return;
	}
	// NOTE(review): setChannels() is called a second time here; presumably
	// redundant with set_channels_main above (see TODO) — confirm it has no
	// required side effects before deduplicating.
	QuadCLT.SetChannels [] set_channels=quadCLT_main.setChannels(debugLevel); // TODO: use just the last one (to need this is no time)
	// spawn a QuadCLT for the last (reference) scene set
	QuadCLT ref_quadCLT = quadCLT_main.spawnQuadCLT(
			set_channels[set_channels.length-1].set_name,
			clt_parameters,
			colorProcParameters, //
			threadsMax,
			debugLevel);
	ref_quadCLT.setDSRBG( // runs GPU to calculate average R,B,G
			clt_parameters, // CLTParameters clt_parameters,
			threadsMax,     // int threadsMax, // maximal number of threads to launch
			updateStatus,   // boolean updateStatus,
			debugLevel);    // int debugLevel)
	OpticalFlow opticalFlow = new OpticalFlow(
			threadsMax,     // int threadsMax, // maximal number of threads to launch
			updateStatus);  // boolean updateStatus);
	opticalFlow.IntersceneAccumulate(
			clt_parameters,      // CLTParameters clt_parameters,
			colorProcParameters, // ColorProcParameters colorProcParameters,
			ref_quadCLT,         // QuadCLT [] scenes, // ordered by increasing timestamps
			clt_parameters.ofp.debug_level_optical); // 1); // -1); // int debug_level);
	System.out.println("End of intersceneAccumulate()");
}
public void batchLwirRig(
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment