...
 
Commits (2)
......@@ -11,12 +11,10 @@
<version>3.1</version>
<relativePath/>
</parent>
<!--
<properties>
<imagej.app.directory>/home/foxel/Desktop/Fiji.app</imagej.app.directory>
<maven.compiler.target>1.8</maven.compiler.target>
<maven.compiler.source>1.8</maven.compiler.source>
</properties>
-->
<groupId>com.elphel</groupId>
<artifactId>imagej-elphel</artifactId>
......
......@@ -194,7 +194,7 @@ public class GPUTileProcessor {
}
/**
* Initialize from the float array (read from the GPU)
* @param flt float array containing tasks data
* @param indx task number to use
*/
public TpTask(float [] flt, int indx, boolean use_aux)
......@@ -2713,6 +2713,83 @@ public class GPUTileProcessor {
ImageDtt.startAndJoin(threads);
}
/**
 * Prepare GPU tasks for interscene accumulation - instead of the uniform grid, pixel coordinates and disparity
 * are provided by the caller. They are calculated by recalculating from the reference scene after appropriate
 * transformation (shift, rotation and ERS correction).
 * @param pXpYD Array of per-tile pX, pY and disparity triplets (or nulls for undefined tiles).
 * @param task_code Value stored in the task field of each active tile (typically 512? - TODO confirm).
 * @param geometryCorrection GeometryCorrection instance for the camera. Must be non-null - it is
 *        dereferenced unconditionally.
 * @param disparity_corr Disparity correction at infinity.
 * @param threadsMax Maximal number of threads to run concurrently.
 * @return Array of fully prepared TpTask instances to be fed to the GPU, one per non-null pXpYD entry,
 *         in ascending tile-number order of discovery.
 */
public TpTask[] setInterTasks(
		double [][] pXpYD, // per-tile array of pX,pY,disparity triplets (or nulls)
		int task_code, // code to use for active tiles
		final GeometryCorrection geometryCorrection,
		final double disparity_corr,
		final int threadsMax) // maximal number of threads to launch
{
	final int tilesX = img_width / DTT_SIZE;
	final int tiles = pXpYD.length;
	final Matrix [] corr_rots = geometryCorrection.getCorrVector().getRotMatrices(); // per-sensor rotation matrices
	// Original tested (geometryCorrection != null) here, but it is already dereferenced above,
	// so the check was dead code - always num_cams.
	final int quad_main = num_cams;
	final Thread[] threads = ImageDtt.newThreadArray(threadsMax);
	final AtomicInteger ai = new AtomicInteger(0);
	final AtomicInteger aTiles = new AtomicInteger(0);
	final int [] tile_indices = new int [tiles];
	// Pass 1: collect tile numbers of all defined (non-null) tiles into a compacted index array.
	for (int ithread = 0; ithread < threads.length; ithread++) {
		threads[ithread] = new Thread() {
			@Override
			public void run() {
				for (int nTile = ai.getAndIncrement(); nTile < tiles; nTile = ai.getAndIncrement()) if (pXpYD[nTile] != null) {
					tile_indices[aTiles.getAndIncrement()] = nTile;
				}
			}
		};
	}
	ImageDtt.startAndJoin(threads);
	ai.set(0);
	// Pass 2: build one TpTask per defined tile.
	// BUGFIX: the compacted output array must be indexed by indx (0..tp_tasks.length-1), not by the
	// original tile number nTile - the previous code used tp_tasks[nTile], which is out of bounds
	// whenever some tiles are null, and it never allocated the TpTask elements (NPE on first use).
	final TpTask[] tp_tasks = new TpTask[aTiles.get()];
	for (int ithread = 0; ithread < threads.length; ithread++) {
		threads[ithread] = new Thread() {
			@Override
			public void run() {
				for (int indx = ai.getAndIncrement(); indx < tp_tasks.length; indx = ai.getAndIncrement()) {
					int nTile = tile_indices[indx];
					TpTask tp_task = new TpTask(); // assumes a no-arg TpTask constructor exists - TODO confirm against class
					tp_task.ty = nTile / tilesX;
					tp_task.tx = nTile % tilesX;
					tp_task.task = task_code;
					double disparity = pXpYD[nTile][2] + disparity_corr;
					tp_task.target_disparity = (float) disparity; // will it be used?
					double [][] disp_dist_main = new double[quad_main][]; // used to correct 3D correlations (not yet used here)
					double [][] centersXY_main = geometryCorrection.getPortsCoordinatesAndDerivatives(
							geometryCorrection, // GeometryCorrection gc_main,
							false,              // boolean use_rig_offsets,
							corr_rots,          // Matrix [] rots,
							null,               // Matrix [][] deriv_rots,
							null,               // double [][] pXYderiv, // if not null, should be double[8][]
							disp_dist_main,     // used to correct 3D correlations
							pXpYD[nTile][0],
							pXpYD[nTile][1],
							disparity);         // + disparity_corr);
					tp_task.xy = new float [centersXY_main.length][2];
					for (int i = 0; i < centersXY_main.length; i++) {
						tp_task.xy[i][0] = (float) centersXY_main[i][0];
						tp_task.xy[i][1] = (float) centersXY_main[i][1];
					}
					tp_tasks[indx] = tp_task; // publish fully-built task (each indx is written by exactly one thread)
				}
			}
		};
	}
	ImageDtt.startAndJoin(threads);
	return tp_tasks;
}
public void setLpfRbg(
float [][] lpf_rbg, // 4 64-el. arrays: r,b,g,m
boolean debug)
......
......@@ -247,6 +247,19 @@ public class ErsCorrection extends GeometryCorrection {
}
/**
 * Save the sensor scan line duration (seconds, used for ERS correction) under the
 * key {@code prefix + "_line_time"}.
 * @param prefix key prefix identifying this camera/scene
 * @param properties destination Properties object
 */
public void setPropertiesLineTime(String prefix, Properties properties){
	properties.setProperty(prefix + "_line_time", String.valueOf(line_time));
}
/**
 * Restore the sensor scan line duration from the key {@code prefix + "_line_time"}.
 * Falls back to {@code OLD_LINE_TIME} when the key is absent (legacy files written
 * before the line time was stored).
 * @param prefix key prefix identifying this camera/scene
 * @param properties source Properties object
 * @return true if the property was present and parsed, false if the legacy default was applied
 * @throws NumberFormatException if the stored value is not a valid double (propagated, as before)
 */
public boolean getPropertiesLineTime(String prefix, Properties properties){
	// Look the key up once instead of twice as the original did.
	String sline_time = properties.getProperty(prefix + "_line_time");
	if (sline_time != null) {
		line_time = Double.parseDouble(sline_time);
		return true;
	}
	line_time = OLD_LINE_TIME; // legacy property files lack this key
	return false;
}
public void setPropertiesPose(String prefix, Properties properties){
......@@ -739,7 +752,11 @@ public class ErsCorrection extends GeometryCorrection {
// setup from extrinsics vector
public void setupERSfromExtrinsics()
{
double BUGS = 26.5/36.38; // tempoorary correction for the wrong scan line time!
double BUGS = 1.0;
if (line_time == OLD_LINE_TIME) {
BUGS = 26.5/36.38; // temporary correction for the wrong scan line time!
}
double [] ersv = getCorrVector().getIMU();
setupERS(
new double [3], // double [] wxyz_center, // world camera XYZ (meters) for the frame center
......
......@@ -44,6 +44,7 @@ public class GeometryCorrection {
// static final double FOCAL_LENGTH = 4.5; // nominal focal length - used as default and to convert editable parameters to pixels
// static final double DISTORTION_RADIUS = 2.8512; // nominal distortion radius - half width of the sensor
// static final double PIXEL_SIZE = 2.2; //um
public static double OLD_LINE_TIME = 26.5E-6;
static final String[] RIG_PAR_NAMES = {"azimuth", "tilt", "roll", "zoom", "angle", "baseline"};
public static String RIG_PREFIX = "rig-";
static double SCENE_UNITS_SCALE = 0.001; // meters from mm
......@@ -57,7 +58,7 @@ public class GeometryCorrection {
"velocity_x", "velocity_y", "velocity_z"};
public int debugLevel = 0;
public double line_time = 26.5E-6; // duration of sensor scan line (for ERS) Wrong, 36.38us (change and re-run ERS
public double line_time = 36.38E-6; // 26.5E-6; // duration of sensor scan line (for ERS) Wrong, 36.38us (change and re-run ERS
public int pixelCorrectionWidth=2592; // virtual camera center is at (pixelCorrectionWidth/2, pixelCorrectionHeight/2)
public int pixelCorrectionHeight=1936;
......
......@@ -3232,7 +3232,7 @@ public class OpticalFlow {
double k_prev,
QuadCLT reference_QuadCLT,
QuadCLT scene_QuadCLT,
double corr_scale, // = 0.75
double corr_scale, // = 0.75 - REMOVE
int debug_level)
{
TileProcessor tp = reference_QuadCLT.getTileProcessor();
......
......@@ -371,6 +371,7 @@ public class QuadCLTCPU {
ers.getPropertiesPose(prefix, properties);
ers.getPropertiesERS(prefix, properties);
ers.getPropertiesScenes(prefix, properties);
ers.getPropertiesLineTime(prefix, properties); // will set old value if not in the file
return properties;
}
......@@ -672,6 +673,7 @@ public class QuadCLTCPU {
ers.setPropertiesPose(prefix, properties);
ers.setPropertiesERS(prefix, properties);
ers.setPropertiesScenes(prefix, properties);
ers.setPropertiesLineTime(prefix, properties);
}
}
......@@ -8289,7 +8291,7 @@ public class QuadCLTCPU {
* @param debugLevel
* @return true on success, false - on failure
*/
public boolean extrinsicsCLT( // USED in lwir TODO: provide boolean
public boolean extrinsicsCLT(
CLTParameters clt_parameters,
boolean adjust_poly,
double inf_min, // = -1.0;
......