Commit 8e12622f authored by Andrey Filippov

Working on FOPEN

parent 273e58c7
...@@ -1672,7 +1672,7 @@ public class OrangeTest {
for (int ipix=0; ipix < num_pixels; ipix++) {
mask[nframe][ipix] = !(weights_in[nframe][ipix] > 0);
}
-tn. growSelection(
+tn.growSelection(
shrink, // int grow, // grow tile selection by 1 over non-background tiles 1: 4 directions, 2 - 8 directions, 3 - 8 by 1, 4 by 1 more
mask[nframe], // final boolean [] tiles,
null); // final boolean [] prohibit)
...
package com.elphel.imagej.orthomosaic;
import java.awt.Rectangle;
import java.util.Arrays;
import java.util.concurrent.atomic.AtomicInteger;
import com.elphel.imagej.cameras.CLTParameters;
import com.elphel.imagej.common.ShowDoubleFloatArrays;
import com.elphel.imagej.tileprocessor.ErsCorrection;
import com.elphel.imagej.tileprocessor.ImageDtt;
import com.elphel.imagej.tileprocessor.OpticalFlow;
import com.elphel.imagej.tileprocessor.QuadCLT;
import com.elphel.imagej.tileprocessor.TileNeibs;
import com.elphel.imagej.tileprocessor.TileProcessor;
import ij.ImagePlus;
/*
* In -INTER-INTRA-LMA.tiff: for pixels where ground == terrain, use the disparity layer for both;
* where they differ, use terrain for terrain and disparity for vegetation.
* Variant: use terrain == ground where they are the same, and disparity where they differ.
*
*/
public class VegetationModel {
public static final int [][] FOUR_CORNERS_Z =
{
{0,1,8,2},
{8,2,4,3},
{6,8,5,4},
{7,0,6,8}
};
public static final int [][] XY_OFFS = {{0,-1},{0,0},{-1,0},{-1,-1}};
public static final int PXPYD_LEN=3;
public static final int [][] CORN_CW_OFFS = {{0,0},{1,0},{1,1},{0,1}};
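/**
* Split the combined DSI into terrain and vegetation disparity layers (description of the code
* below, added for clarity). A tile gets the LMA disparity as its vegetation value where ground
* exceeds terrain and the disparity is above the terrain by more than min_above; otherwise the
* vegetation value is patched from at least patch_neibs qualifying neighbors, weighted by
* strength^neibs_pow. Vegetation stays NaN where neither condition holds.
* @param combo_dsn combined DSI layers (uses COMBO_DSN_INDX_TERRAIN, _GROUND, _LMA, _STRENGTH)
* @param tilesX number of tiles in a row
* @param min_above absolute disparity difference above terrain to treat a tile as vegetation
* @param patch_neibs minimal number of qualifying neighbors needed to patch a tile
* @param neibs_pow raise strength to this power when averaging neighbors
* @return {terrain, vegetation} per-tile disparity arrays
*/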
public static double [][] getTerrainVegetation(
final double [][] combo_dsn,
final int tilesX,
final double min_above, // absolute disparity difference
final int patch_neibs,
final double neibs_pow){ // raise strength to this power when averaging neighbors
final double [] terrain = combo_dsn[OpticalFlow.COMBO_DSN_INDX_TERRAIN];
final double [] ground = combo_dsn[OpticalFlow.COMBO_DSN_INDX_GROUND];
final double [] disparity = combo_dsn[OpticalFlow.COMBO_DSN_INDX_LMA];
final double [] strength = combo_dsn[OpticalFlow.COMBO_DSN_INDX_STRENGTH];
final int num_pixels = disparity.length;
final double [] vegetation = new double [num_pixels];
Arrays.fill(vegetation, Double.NaN);
final int dbg_tile = -(22+17*80);
final Thread[] threads = ImageDtt.newThreadArray(QuadCLT.THREADS_MAX);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
TileNeibs tn = new TileNeibs(tilesX, vegetation.length/tilesX);
for (int nTile = ai.getAndIncrement(); nTile < vegetation.length; nTile = ai.getAndIncrement()) {
if (nTile==dbg_tile) {
System.out.println("getTerrainVegetation(): nTile="+nTile);
}
if ((ground[nTile] > terrain[nTile]) &&(disparity[nTile]-terrain[nTile] > min_above)) {
vegetation[nTile] = disparity[nTile];
} else {
int num_neibs = 0;
double sum_wd = 0.0, sum_w = 0.0;
for (int dir = 0; dir < TileNeibs.DIRS; dir++) {
int nTile1 = tn.getNeibIndex(nTile, dir);
if ((nTile1 >= 0) &&
(disparity[nTile1]-terrain[nTile1] > min_above) &&
(ground[nTile1] > terrain[nTile1]) &&
(strength[nTile1] > 0)) {
double d = disparity[nTile1];
double w = Math.pow(strength[nTile1], neibs_pow);
sum_wd += d*w;
sum_w += w;
num_neibs++;
}
}
if (num_neibs >= patch_neibs) {
vegetation[nTile] = sum_wd/sum_w;
}
}
}
}
};
}
ImageDtt.startAndJoin(threads);
return new double [][] {terrain,vegetation};
}
public static void test_vegetation(
CLTParameters clt_parameters,
QuadCLT [] quadCLTs,
int ref_index,
int debugLevel) {
TileProcessor tp = quadCLTs[ref_index].getTileProcessor();
int tilesX = tp.getTilesX();
int tilesY = tp.getTilesY();
int tileSize = tp.getTileSize();
double min_above = 0.02; // 0.025; // .04;
int patch_neib = 4; // 5;
double neibs_pow = 0.5;
double [][] combo_dsn =quadCLTs[ref_index].restoreComboDSI(true);
double [][] terr_veg = getTerrainVegetation(
combo_dsn, // double [][] combo_dsn);
tilesX, // final int tilesX,
min_above, // final double min_above, // absolute disparity difference
patch_neib, // final int patch_neib){
neibs_pow); // final double neibs_pow);
String [] titles_terr_veg = {"terrain","vegetation"};
ShowDoubleFloatArrays.showArrays(
terr_veg,
tilesX,
tilesY,
true,
quadCLTs[ref_index].getImageName()+"-terrain_vegetation-min_ab"+min_above+".tiff",
titles_terr_veg);
double [] reference_xyz = OpticalFlow.ZERO3;
double [] reference_atr = OpticalFlow.ZERO3;
double [][][] terrain_pXpYD = getPxPyDs(
reference_xyz, // double [] reference_xyz, // offset reference camera {x,y,z}
reference_atr, // double [] reference_atr,
terr_veg[0], // double [] ref_disparity,
quadCLTs, // QuadCLT [] quadCLTs,
null, // boolean [] scene_selection, // null or same length as quadCLTs
ref_index, // int ref_index,
debugLevel); // int debugLevel)
double [][][] vegetation_pXpYD = getPxPyDs(
reference_xyz, // double [] reference_xyz, // offset reference camera {x,y,z}
reference_atr, // double [] reference_atr,
terr_veg[1], // double [] ref_disparity,
quadCLTs, // QuadCLT [] quadCLTs,
null, // boolean [] scene_selection, // null or same length as quadCLTs
ref_index, // int ref_index,
debugLevel); // int debugLevel)
double [][][] terrain_diff = diffPxPyDs(
terrain_pXpYD, // final double [][][] pXpYD,
ref_index); // final int ref_index)
double [][][] vegetation_diff = diffPxPyDs(
vegetation_pXpYD, // final double [][][] pXpYD,
ref_index); // final int ref_index)
int num_scenes = quadCLTs.length;
int num_tiles = terrain_pXpYD[0].length;
int num_pixels = num_tiles*tileSize*tileSize;
boolean show_debug = true;
if (show_debug) {
String [] titles_frame = {"terr-pX","veg-pX","terr-pY","veg-pY","terr-D","veg-D"};
String [] titles_scene = new String [num_scenes];
double [][][] data_dbg = new double [titles_frame.length][num_scenes][num_tiles];
for (int i = 0; i < data_dbg.length;i++) {
for (int j = 0; j < data_dbg[0].length;j++) {
Arrays.fill(data_dbg[i][j], Double.NaN);
}
}
for (int nscene = 0; nscene < num_scenes; nscene++) {
titles_scene[nscene] = nscene+":"+quadCLTs[nscene].getImageName();
if ((terrain_pXpYD[nscene] == null) || (vegetation_pXpYD[nscene]==null)){
System.out.println("test_vegetation(): null for nscene="+nscene);
} else {
for (int ntile = 0; ntile < num_tiles; ntile++) {
if (terrain_pXpYD[nscene][ntile] != null) {
for (int k = 0; k < terrain_pXpYD[nscene][ntile].length; k++) {
data_dbg[2*k + 0][nscene][ntile] = terrain_pXpYD[nscene][ntile][k];
}
}
if (vegetation_pXpYD[nscene][ntile] != null) {
for (int k = 0; k < vegetation_pXpYD[nscene][ntile].length; k++) {
data_dbg[2*k + 1][nscene][ntile] = vegetation_pXpYD[nscene][ntile][k];
}
}
}
}
}
ShowDoubleFloatArrays.showArraysHyperstack(
data_dbg, // double[][][] pixels,
tilesX, // int width,
"terrain_vegetation_pXpYD", // String title, "time_derivs-rt"+diff_time_rt+"-rxy"+diff_time_rxy,
titles_scene, // String [] titles, // all slices*frames titles or just slice titles or null
titles_frame, // String [] frame_titles, // frame titles or null
true); // boolean show)
// same for differences
double [][][] diff_dbg = new double [6][num_scenes][num_tiles];
for (int i = 0; i < diff_dbg.length;i++) {
for (int j = 0; j < diff_dbg[0].length;j++) {
Arrays.fill(diff_dbg[i][j], Double.NaN);
}
}
for (int nscene = 0; nscene < num_scenes; nscene++) {
// titles_scene[nscene] = nscene+":"+quadCLTs[nscene].getImageName();
if ((terrain_diff[nscene] == null) || (vegetation_diff[nscene]==null)){
System.out.println("test_vegetation(): null for nscene="+nscene);
} else {
for (int ntile = 0; ntile < num_tiles; ntile++) {
if (terrain_diff[nscene][ntile] != null) {
for (int k = 0; k < terrain_diff[nscene][ntile].length; k++) {
diff_dbg[2*k + 0][nscene][ntile] = terrain_diff[nscene][ntile][k];
}
}
if (vegetation_diff[nscene][ntile] != null) {
for (int k = 0; k < vegetation_diff[nscene][ntile].length; k++) {
diff_dbg[2*k + 1][nscene][ntile] = vegetation_diff[nscene][ntile][k];
}
}
}
}
}
ShowDoubleFloatArrays.showArraysHyperstack(
diff_dbg, // double[][][] pixels,
tilesX, // int width,
"terrain_vegetation_pXpYD_differetial", // String title, "time_derivs-rt"+diff_time_rt+"-rxy"+diff_time_rxy,
titles_scene, // String [] titles, // all slices*frames titles or just slice titles or null
titles_frame, // String [] frame_titles, // frame titles or null
true); // boolean show)
}
int dbg_scene = -64;
double [][][] terrain_pix = new double [num_scenes][][];
double [][][] vegetation_pix = new double [num_scenes][][];
for (int nscene = 0; nscene < num_scenes; nscene++) {
if (nscene == dbg_scene) {
System.out.println("test_vegetation(): nscene="+nscene);
}
terrain_pix[nscene] = interpolatePxPyDBilinear(
terrain_diff[nscene], // final double [][] pXpYD_tile,
tilesX, // final int tilesX,
tileSize); // final int tile_size)
vegetation_pix[nscene] = interpolatePxPyDBilinear(
vegetation_diff[nscene], // final double [][] pXpYD_tile,
tilesX, // final int tilesX,
tileSize); // final int tile_size)
}
if (show_debug) {
String [] titles_frame = {"terr-pX","veg-pX","terr-pY","veg-pY","terr-D","veg-D"};
String [] titles_scene = new String [num_scenes];
double [][][] data_dbg = new double [titles_frame.length][num_scenes][num_pixels];
for (int i = 0; i < data_dbg.length;i++) {
for (int j = 0; j < data_dbg[0].length;j++) {
Arrays.fill(data_dbg[i][j], Double.NaN);
}
}
for (int nscene = 0; nscene < num_scenes; nscene++) {
titles_scene[nscene] = nscene+":"+quadCLTs[nscene].getImageName();
if ((terrain_pix[nscene] == null) || (vegetation_pix[nscene]==null)){
System.out.println("test_vegetation(): null for nscene="+nscene);
} else {
for (int npix = 0; npix < num_pixels; npix++) {
if (terrain_pix[nscene][npix] != null) {
for (int k = 0; k < terrain_pix[nscene][npix].length; k++) {
data_dbg[2*k + 0][nscene][npix] = terrain_pix[nscene][npix][k];
}
}
if (vegetation_pix[nscene][npix] != null) {
for (int k = 0; k < vegetation_pix[nscene][npix].length; k++) {
data_dbg[2*k + 1][nscene][npix] = vegetation_pix[nscene][npix][k];
}
}
}
}
}
ShowDoubleFloatArrays.showArraysHyperstack(
data_dbg, // double[][][] pixels,
tilesX*tileSize, // int width,
"terrain_vegetation_pix", // String title, "time_derivs-rt"+diff_time_rt+"-rxy"+diff_time_rxy,
titles_scene, // String [] titles, // all slices*frames titles or just slice titles or null
titles_frame, // String [] frame_titles, // frame titles or null
true); // boolean show)
}
double [][][] terrain_inv = new double [num_scenes][][];
double [][][] vegetation_inv = new double [num_scenes][][];
Rectangle out_window = new Rectangle(0,0,640,512);
boolean out_diff = true;
int patch_min_neibs = 6;
int [] pnum_patched = new int[1];
for (int nscene = 0; nscene < num_scenes; nscene++) {
if (nscene == dbg_scene) {
System.out.println("test_vegetation(): nscene="+nscene);
}
terrain_inv[nscene] = invertMap(
terrain_pix[nscene], // final double [][] map_in,
tilesX*tileSize, // final int width,
out_window, // final Rectangle out_window,
true, // final boolean in_diff,
out_diff, // final boolean out_diff)
patch_min_neibs, // final int patch_min_neibs)
pnum_patched); // final int [] pnum_patched)
vegetation_inv[nscene] = invertMap(
vegetation_pix[nscene], // final double [][] map_in,
tilesX*tileSize, // final int width,
out_window, // final Rectangle out_window,
true, // final boolean in_diff,
out_diff, // final boolean out_diff)
patch_min_neibs, // final int patch_min_neibs)
pnum_patched); // final int [] pnum_patched)
}
/* */
double [][][] veg_to_terr = new double [num_scenes][][];
Rectangle window1 = new Rectangle(0,0,640,480);
Rectangle window2 = out_window;
boolean map_diff1 = true;
boolean map_diff2 = out_diff; // true;
boolean map_diff_out = true;
for (int nscene = 0; nscene < num_scenes; nscene++) {
veg_to_terr[nscene] = combineMaps(
terrain_pix[nscene], // final double [][] map1,
window1, // final Rectangle window1,
map_diff1, // final boolean map_diff1,
vegetation_inv[nscene], // final double [][] map2,
window2, // final Rectangle window2,
map_diff2, // final boolean map_diff2,
map_diff_out); // final boolean map_diff_out)
}
/* */
if (show_debug) {
String [] titles_frame = {"terr-pX","veg-pX","terr-pY","veg-pY"};
String [] titles_scene = new String [num_scenes];
double [][][] data_dbg = new double [titles_frame.length][num_scenes][num_pixels];
for (int i = 0; i < data_dbg.length;i++) {
for (int j = 0; j < data_dbg[0].length;j++) {
Arrays.fill(data_dbg[i][j], Double.NaN);
}
}
for (int nscene = 0; nscene < num_scenes; nscene++) {
titles_scene[nscene] = nscene+":"+quadCLTs[nscene].getImageName();
if ((terrain_inv[nscene] == null) || (vegetation_inv[nscene]==null)){
System.out.println("test_vegetation(): null for nscene="+nscene);
} else {
for (int npix = 0; npix < num_pixels; npix++) {
if (terrain_inv[nscene][npix] != null) {
for (int k = 0; k < terrain_inv[nscene][npix].length; k++) {
data_dbg[2*k + 0][nscene][npix] = terrain_inv[nscene][npix][k];
}
}
if (vegetation_inv[nscene][npix] != null) {
for (int k = 0; k < vegetation_inv[nscene][npix].length; k++) {
data_dbg[2*k + 1][nscene][npix] = vegetation_inv[nscene][npix][k];
}
}
}
}
}
ShowDoubleFloatArrays.showArraysHyperstack(
data_dbg, // double[][][] pixels,
tilesX*tileSize, // int width,
"terrain_vegetation_inv", // String title, "time_derivs-rt"+diff_time_rt+"-rxy"+diff_time_rxy,
titles_scene, // String [] titles, // all slices*frames titles or just slice titles or null
titles_frame, // String [] frame_titles, // frame titles or null
true); // boolean show)
showOffsetsDiff(
terrain_inv, // final double [][][] terrain,
vegetation_inv, // final double [][][] vegetation,
tilesX*tileSize, // final int width,
quadCLTs, // QuadCLT [] quadCLTs, // just for names
"terrain_vegetation_offset.tiff"); // String title) { // with .tiff
/* */
showOffsetsCombo(
veg_to_terr, // final double [][][] map_combo,
tilesX*tileSize, // final int width,
quadCLTs, // QuadCLT [] quadCLTs, // just for names
"combo_offset.tiff"); // String title) { // with .tiff
/* */
}
boolean mb_en = clt_parameters.imp.mb_en; // && (fov_tiles==null) && (mode3d > 0);
double mb_max_gain = clt_parameters.imp.mb_max_gain; // 5.0; // motion blur maximal gain (if more - move second point more than a pixel
double [][][] terrain_render = renderDouble(
clt_parameters, // CLTParameters clt_parameters,
mb_en, // boolean mb_en,
mb_max_gain, // double mb_max_gain,
reference_xyz, // double [] reference_xyz, // offset reference camera {x,y,z}
reference_atr, // double [] reference_atr,
terr_veg[0], // double [] ref_disparity,
quadCLTs, // QuadCLT [] quadCLTs,
null, // boolean [] scene_selection, // null or same length as quadCLTs
ref_index, // int ref_index,
terrain_pXpYD, // double [][][] pXpYD,
debugLevel); // int debugLevel){
double [][][] vegetation_render = renderDouble(
clt_parameters, // CLTParameters clt_parameters,
mb_en, // boolean mb_en,
mb_max_gain, // double mb_max_gain,
reference_xyz, // double [] reference_xyz, // offset reference camera {x,y,z}
reference_atr, // double [] reference_atr,
terr_veg[1], // double [] ref_disparity,
quadCLTs, // QuadCLT [] quadCLTs,
null, // boolean [] scene_selection, // null or same length as quadCLTs
ref_index, // int ref_index,
vegetation_pXpYD, // double [][][] pXpYD,
debugLevel); // int debugLevel){
double [][] terrain_mono = new double [num_scenes][];
double [][] vegetation_mono = new double [num_scenes][];
double [][][] terrain_vegetation_all = new double [2][num_scenes+1][];
for (int i = 0; i < num_scenes; i++) {
terrain_mono[i] = terrain_render[i][0];
vegetation_mono[i] = vegetation_render[i][0];
}
System.arraycopy(terrain_mono, 0, terrain_vegetation_all[0], 0, num_scenes);
System.arraycopy(vegetation_mono, 0, terrain_vegetation_all[1], 0, num_scenes);
terrain_vegetation_all[0][num_scenes] = averageMono(terrain_mono);
terrain_vegetation_all[1][num_scenes] = averageMono(vegetation_mono);
if (show_debug) {
String [] titles_frame = {"terrain","vegetation"};
String [] titles_scene = new String [num_scenes+1];
for (int nscene = 0; nscene < num_scenes; nscene++) {
titles_scene[nscene] = nscene+":"+quadCLTs[nscene].getImageName();
}
titles_scene[num_scenes] = "average";
ShowDoubleFloatArrays.showArraysHyperstack(
terrain_vegetation_all, // double[][][] pixels,
tilesX * tileSize, // int width,
"terrain_vegetation_render.tiff", // String title, "time_derivs-rt"+diff_time_rt+"-rxy"+diff_time_rxy,
titles_scene, // String [] titles, // all slices*frames titles or just slice titles or null
titles_frame, // String [] frame_titles, // frame titles or null
true); // boolean show)
}
return;
}
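/**
* Show combined per-pixel offsets (e.g. vegetation-to-terrain) as a hyperstack with three frames:
* offset distance, dx and dy for each scene (description added for clarity).
* @param map_combo [scene][pixel]{dx, dy} offsets, e.g. the result of combineMaps()
* @param width image width in pixels
* @param quadCLTs scene sequence, used only for slice names
* @param title image title (with .tiff)
* @return the generated ImagePlus hyperstack
*/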
public static ImagePlus showOffsetsCombo(
final double [][][] map_combo,
final int width,
QuadCLT [] quadCLTs, // just for names
String title) { // with .tiff
final int num_scenes = map_combo.length;
final int num_pixels = map_combo[0].length;
final String [] titles_top = {"dist","dx","dy"};
String [] titles_scene = new String[num_scenes];
final double [][][] img_data = new double [ titles_top.length][num_scenes][num_pixels];
final Thread[] threads = ImageDtt.newThreadArray(QuadCLT.THREADS_MAX);
final AtomicInteger ai = new AtomicInteger(0);
for (int nscene = 0; nscene < num_scenes; nscene++) {
titles_scene[nscene] = nscene+":"+quadCLTs[nscene].getImageName();
final int fscene = nscene;
ai.set(0);
for (int ntype = 0; ntype < img_data.length; ntype++) {
Arrays.fill(img_data[ntype][nscene], Double.NaN);
}
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nPix = ai.getAndIncrement(); nPix < num_pixels; nPix = ai.getAndIncrement()) if (map_combo[fscene][nPix] != null){
double dx = map_combo[fscene][nPix][0];
double dy = map_combo[fscene][nPix][1];
double d = Math.sqrt(dx*dx+dy*dy);
img_data[0][fscene][nPix] = d;
img_data[1][fscene][nPix] = dx;
img_data[2][fscene][nPix] = dy;
}
}
};
}
ImageDtt.startAndJoin(threads);
}
ImagePlus imp = ShowDoubleFloatArrays.showArraysHyperstack(
img_data, // double[][][] pixels,
width, // int width,
title, // "terrain_vegetation_render.tiff", // String title, "time_derivs-rt"+diff_time_rt+"-rxy"+diff_time_rxy,
titles_scene, // String [] titles, // all slices*frames titles or just slice titles or null
titles_top, // String [] frame_titles, // frame titles or null
true); // boolean show)
return imp;
}
public static ImagePlus showOffsetsDiff(
final double [][][] terrain,
final double [][][] vegetation,
final int width,
QuadCLT [] quadCLTs, // just for names
String title) { // with .tiff
final int num_scenes = terrain.length;
final int num_pixels = terrain[0].length;
final String [] titles_top = {"dist","dx","dy"};
String [] titles_scene = new String[num_scenes];
final double [][][] img_data = new double [ titles_top.length][num_scenes][num_pixels];
final Thread[] threads = ImageDtt.newThreadArray(QuadCLT.THREADS_MAX);
final AtomicInteger ai = new AtomicInteger(0);
for (int nscene = 0; nscene < num_scenes; nscene++) {
titles_scene[nscene] = nscene+":"+quadCLTs[nscene].getImageName();
final int fscene = nscene;
ai.set(0);
for (int ntype = 0; ntype < img_data.length; ntype++) {
Arrays.fill(img_data[ntype][nscene], Double.NaN);
}
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
// TileNeibs tn = new TileNeibs(out_window.width, out_window.height);
for (int nPix = ai.getAndIncrement(); nPix < num_pixels; nPix = ai.getAndIncrement()) if ((terrain[fscene][nPix] != null) && (vegetation[fscene][nPix] != null)){
double dx = vegetation[fscene][nPix][0] - terrain[fscene][nPix][0];
double dy = vegetation[fscene][nPix][1] - terrain[fscene][nPix][1];
double d = Math.sqrt(dx*dx+dy*dy);
img_data[0][fscene][nPix] = d;
img_data[1][fscene][nPix] = dx;
img_data[2][fscene][nPix] = dy;
}
}
};
}
ImageDtt.startAndJoin(threads);
}
ImagePlus imp = ShowDoubleFloatArrays.showArraysHyperstack(
img_data, // double[][][] pixels,
width, // int width,
title, // "terrain_vegetation_render.tiff", // String title, "time_derivs-rt"+diff_time_rt+"-rxy"+diff_time_rxy,
titles_scene, // String [] titles, // all slices*frames titles or just slice titles or null
titles_top, // String [] frame_titles, // frame titles or null
true); // boolean show)
return imp;
}
/**
* Combine maps: map1 and map2 (map2 transforms the result of map1)
* @param map1 first map defined on a grid, each element is either null or a pair {mapped_X, mapped_Y}
* @param window1 Rectangle window for the first map (map1 has window1.width*window1.height elements);
* window1.x and window1.y specify the zero point if the map is differential
* @param map_diff1 true if map1 is differential, specifying relative x,y, not absolute ones
* @param map2 second map to be combined (applied to the result of map1)
* @param window2 similar to window1, for the second map
* @param map_diff2 true if map2 is differential, specifying relative x,y, not absolute ones
* @param map_diff_out generate a differential map rather than an absolute one
* @return map that combines map1 and map2 using bilinear interpolation
*/
public static double [][] combineMaps(
final double [][] map1,
final Rectangle window1,
final boolean map_diff1,
final double [][] map2,
final Rectangle window2,
final boolean map_diff2,
final boolean map_diff_out) {
final int odepth = 2; // just x,y. if 3 - will have 0 for disparity
final int num_pixels1 = window1.width*window1.height;
final double [][] combo_map = new double [num_pixels1][];
final Thread[] threads = ImageDtt.newThreadArray(QuadCLT.THREADS_MAX);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nPix = ai.getAndIncrement(); nPix < num_pixels1; nPix = ai.getAndIncrement()) if (map1[nPix] != null){
pix_label: {
int ix = nPix % window1.width;
int iy = nPix / window1.width;
double [] pxy = map1[nPix].clone();
if (map_diff1) {
pxy[0] += 0.5 + ix - window1.x;
pxy[1] += 0.5 + iy - window1.y;
}
pxy[0] += window2.x;
pxy[1] += window2.y;
int x0 = (int) Math.floor(pxy[0]);
int y0 = (int) Math.floor(pxy[1]);
if ((x0 < 0) || (y0 < 0) || (x0 >= (window2.width -1)) || (y0 >= (window2.height-1))) {
break pix_label; // all 4 corners should fit
}
int opix00 = x0+ y0*window2.width;
double [][][] corners = {
{map2[opix00], map2[opix00 + 1]},
{map2[opix00 + window2.width], map2[opix00 + window2.width + 1]}};
for (int dy = 0; dy < 2; dy++) {
for (int dx = 0; dx < 2; dx++) {
double [] corner = corners[dy][dx];
if (corner == null) {
break pix_label; // all 4 corners should be defined
}
corners[dy][dx] = corners[dy][dx].clone();
corner = corners[dy][dx];
if (map_diff2) {
corner[0] += x0 + dx + 0.5 - window2.x;
corner[1] += y0 + dy + 0.5 - window2.y;
}
}
}
double fx = pxy[0] - x0;
double fy = pxy[1] - y0;
double [] cxy = new double [odepth];
for (int i = 0; i < odepth; i++) {
cxy[i] =
(1-fx)*(1-fy)*corners[0][0][i] +
( fx)*(1-fy)*corners[0][1][i] +
(1-fx)*( fy)*corners[1][0][i] +
( fx)*( fy)*corners[1][1][i];
}
if (map_diff_out) {
cxy[0] -= 0.5 + ix - window1.x;
cxy[1] -= 0.5 + iy - window1.y;
}
combo_map[nPix] = cxy;
}
}
}
};
}
ImageDtt.startAndJoin(threads);
return combo_map;
}
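// Illustrative usage sketch (not part of the original commit): chain a forward differential map
// with an inverted one over their windows, the way test_vegetation() combines terrain_pix with
// vegetation_inv. The method name below is hypothetical; it only wraps the combineMaps() call above.
public static double [][] composeForwardWithInverse(
final double [][] forward_map, // e.g. per-pixel terrain offsets for one scene
final Rectangle forward_window, // window of the forward map
final double [][] inverse_map, // e.g. result of invertMap() for the vegetation layer
final Rectangle inverse_window) { // window used when inverting
return combineMaps(
forward_map, forward_window, true, // forward map is differential
inverse_map, inverse_window, true, // inverted map is differential too
true); // return a differential map
}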
/**
* Invert a map that defines x,y for a uniform grid
* @param map_in each pixel is null or {X,Y,...}
* @param width input map width
* @param out_window Rectangle, width and height define the output size, x and y - coordinates of the
* output point corresponding to X=0, Y=0 of the input data
* @param in_diff input map is differential, add x+0.5, y+0.5 to X,Y
* @param out_diff make the output map differential by subtracting (x-out_window.x-0.5) from output x,
* and (y-out_window.y-0.5) from output y
* @param patch_min_neibs patch holes that have at least this number of defined neighbors (do not patch if 0)
* @param pnum_patched null or int[1] to return the number of patched pixels
* @return per-pixel array of {pX,pY} pairs (or nulls) for each output pixel in line-scan order
*/
public static double [][] invertMap(
final double [][] map_in,
final int width,
final Rectangle out_window,
final boolean in_diff,
final boolean out_diff,
final int patch_min_neibs,
final int [] pnum_patched){
final int dbg_opix = (width < 0) ? 1: (-(30+36*640)); // just to remove warning when not used
final int odepth = 2; // just x,y. if 3 - will have 0 for disparity
final int height = map_in.length/width;
final int num_opixels = out_window.width * out_window.height;
final double [][] map_out = new double [num_opixels][];
final Thread[] threads = ImageDtt.newThreadArray(QuadCLT.THREADS_MAX);
final AtomicInteger ai = new AtomicInteger(0);
final AtomicInteger anum_patched = new AtomicInteger(0);
if (pnum_patched !=null) {
pnum_patched[0] = 0;
}
// use multiple passes to avoid unlikely races
for (int offs_y = 0; offs_y < 2; offs_y++) {
final int height2 = (height + 1 - offs_y);
for (int offs_x = 0; offs_x < 2; offs_x++) {
final int width2 = (width + 1 - offs_x);
final int num_pix2 = height2 * width2;
final int [] offs = {offs_x,offs_y};
ai.set(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
double [][] corners_cw = new double [4][2];
int [][] bounds = new int[2][2]; // [y_not_x][max_not_min]
for (int nPix2 = ai.getAndIncrement(); nPix2 < num_pix2; nPix2 = ai.getAndIncrement()) {
int ix0 =offs[0] + 2 * (nPix2 % width2);
int iy0 =offs[1] + 2 * (nPix2 / width2);
boolean has_nan = false;
for (int dir = 0; dir < corners_cw.length; dir++) {
int ix1 = ix0 + CORN_CW_OFFS[dir][0];
int iy1 = iy0 + CORN_CW_OFFS[dir][1];
if ((ix1 >= width) || (iy1 >= height) || (map_in[ix1+width*iy1] == null)) {
has_nan = true;
break;
}
int in_indx = ix1+width*iy1;
for (int ynx = 0; ynx < 2; ynx++) {
corners_cw[dir][ynx] = map_in[in_indx][ynx];
}
if (in_diff) {
corners_cw[dir][0] += 0.5 + ix1;
corners_cw[dir][1] += 0.5 + iy1;
}
corners_cw[dir][0] += out_window.x;
corners_cw[dir][1] += out_window.y;
}
if (has_nan) {
continue;
}
// all 4 corners are defined;
bounds[0][0] = (int) Math.floor(corners_cw[0][0]);
bounds[1][0] = (int) Math.floor(corners_cw[0][1]);
bounds[0][1] = bounds[0][0];
bounds[1][1] = bounds[1][0];
for (int dir = 1; dir < corners_cw.length; dir++) {
for (int ynx = 0; ynx < 2; ynx++) {
int mn = (int) Math.floor(corners_cw[dir][ynx]);
int mx = (int) Math.ceil (corners_cw[dir][ynx]);
if (mn < bounds[ynx][0]) {
bounds[ynx][0] = mn;
}
if (mx > bounds[ynx][1]) {
bounds[ynx][1] = mx;
}
}
}
if (bounds[0][0] < 0) bounds[0][0] = 0;
if (bounds[1][0] < 0) bounds[1][0] = 0;
if (bounds[0][1] >= out_window.width) bounds[0][1] = out_window.width - 1;
if (bounds[1][1] >= out_window.height) bounds[1][1] = out_window.height - 1;
if ((bounds[0][0] > bounds[0][1]) || (bounds[1][0] > bounds[1][1])) {
continue; // completely outside output window
}
if (dbg_opix >= 0) {
int dbg_ox = dbg_opix % out_window.width;
int dbg_oy = dbg_opix / out_window.width;
if ((dbg_ox >= bounds[0][0]) && (dbg_ox <= bounds[0][1]) && (dbg_oy >= bounds[1][0]) && (dbg_oy <= bounds[1][1])) {
System.out.println("invertMap(): "+bounds[0][0]+"<=ox<="+bounds[0][1]);
System.out.println("invertMap(): "+bounds[1][0]+"<=oy<="+bounds[1][1]);
}
}
double [] v10 = {corners_cw[1][0]-corners_cw[0][0], corners_cw[1][1]-corners_cw[0][1]};
double [] v30 = {corners_cw[3][0]-corners_cw[0][0], corners_cw[3][1]-corners_cw[0][1]};
double [] v12 = {corners_cw[1][0]-corners_cw[2][0], corners_cw[1][1]-corners_cw[2][1]};
double [] v32 = {corners_cw[3][0]-corners_cw[2][0], corners_cw[3][1]-corners_cw[2][1]};
double l2_10 = v10[0]*v10[0]+v10[1]*v10[1];
double l2_30 = v30[0]*v30[0]+v30[1]*v30[1];
double l2_12 = v12[0]*v12[0]+v12[1]*v12[1];
double l2_32 = v32[0]*v32[0]+v32[1]*v32[1];
for (int oy = bounds[1][0]; oy <= bounds[1][1]; oy++) {
for (int ox = bounds[0][0]; ox <= bounds[0][1]; ox++) {
// see if it is inside 4 corners
double [] vp0 = {ox-corners_cw[0][0], oy-corners_cw[0][1]};
if (vp0[0]*v10[1] > vp0[1]*v10[0]) {
continue;
}
if (vp0[0]*v30[1] < vp0[1]*v30[0]) {
continue;
}
double [] vp2 = {ox-corners_cw[2][0], oy-corners_cw[2][1]};
if (vp2[0]*v12[1] < vp2[1]*v12[0]) {
continue;
}
if (vp2[0]*v32[1] > vp2[1]*v32[0]) {
continue;
}
// inside, now interpolate. First get effective coordinates
double lp0 = Math.sqrt(vp0[0]*vp0[0]+vp0[1]*vp0[1]);
double lp2 = Math.sqrt(vp2[0]*vp2[0]+vp2[1]*vp2[1]);
double u0 = (vp0[0]*v10[0] + vp0[1]*v10[1])/l2_10;
double v0 = (vp0[0]*v30[0] + vp0[1]*v30[1])/l2_30;
double u1 = 1.0 - (vp2[0]*v32[0] + vp2[1]*v32[1])/l2_32;
double v1 = 1.0 - (vp2[0]*v12[0] + vp2[1]*v12[1])/l2_12;
// Use arithmetic average as some of u0,u1,v0,v1 can be small negatives
double u = 0.5 * (u0 + u1);
double v = 0.5 * (v0 + v1);
int oindx = ox + oy*out_window.width;
map_out[oindx] = new double [odepth];
map_out[oindx][0] = ix0 + u;
map_out[oindx][1] = iy0 + v;
if (out_diff) {
map_out[oindx][0] -= ox-out_window.x-0.5;
map_out[oindx][1] -= oy-out_window.y-0.5;
}
}
}
}
}
};
}
ImageDtt.startAndJoin(threads);
}
}
if (patch_min_neibs > 0) {
ai.set(0);
final double [][] map_out_filtered = new double [num_opixels][];
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
TileNeibs tn = new TileNeibs(out_window.width, out_window.height);
for (int nPix = ai.getAndIncrement(); nPix < num_opixels; nPix = ai.getAndIncrement()) if (map_out[nPix] != null){
map_out_filtered[nPix] = map_out[nPix]; // no need to clone;
} else {
int num_neibs = 0;
for (int dir = 0; dir< TileNeibs.DIRS; dir++) {
int nPix1 = tn.getNeibIndex(nPix, dir);
if ((nPix1 >=0) && (map_out[nPix1] != null)){
num_neibs ++;
}
}
if (num_neibs >= patch_min_neibs) {
map_out_filtered[nPix] = new double [odepth];
for (int dir = 0; dir< TileNeibs.DIRS; dir++) {
int nPix1 = tn.getNeibIndex(nPix, dir);
if ((nPix1 >=0) && (map_out[nPix1] != null)){
for (int i = 0; i < odepth; i++) {
map_out_filtered[nPix][i] += map_out[nPix1][i];
}
}
}
for (int i = 0; i < odepth; i++) {
map_out_filtered[nPix][i] /= num_neibs;
}
anum_patched.getAndIncrement();
}
}
}
};
}
ImageDtt.startAndJoin(threads);
if (anum_patched.get() > 0) {
// System.out.println("invertMap(): patched "+anum_patched.get()+" null pixels.");
if (pnum_patched !=null) {
pnum_patched[0] = anum_patched.get();
}
}
return map_out_filtered;
}
// patch possible holes that appeared on the borders?
return map_out;
}
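// Illustrative usage sketch (not part of the original commit): invert a per-pixel differential map
// the same way test_vegetation() inverts vegetation_pix. Variable and method names are hypothetical.
public static double [][] invertDifferentialMap(
final double [][] map_pix, // per-pixel {X,Y} offsets, e.g. vegetation_pix[nscene]
final int width, // input map width in pixels
final Rectangle out_window) { // output size and zero point
int [] num_patched = new int[1]; // returns the number of hole pixels patched from neighbors
return invertMap(
map_pix, // final double [][] map_in,
width, // final int width,
out_window, // final Rectangle out_window,
true, // final boolean in_diff - input map is differential
true, // final boolean out_diff - make the output differential too
6, // final int patch_min_neibs - patch holes with >= 6 defined neighbors
num_patched); // final int [] pnum_patched
}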
/**
* Interpolate per-tile pXpYD to per-pixel values with bilinear interpolation between tile centers.
* Missing neighbors of a defined tile are extrapolated from the opposite neighbor (or copied from
* the center tile), so each of the four quadrants around a defined tile can be interpolated.
* @param pXpYD_tile per-tile data, each element is null or {pX, pY, disparity}
* @param tilesX number of tiles in a row
* @param tile_size tile side in pixels
* @return per-pixel array of {pX, pY, disparity} triplets (or nulls) in line-scan order
*/
public static double [][] interpolatePxPyDBilinear(
final double [][] pXpYD_tile,
final int tilesX,
final int tile_size){
final int width = tilesX * tile_size;
final int htile_size = tile_size/2;
final double pix_step = 1.0/tile_size;
int num_tiles = pXpYD_tile.length;
int num_pixels = num_tiles * tile_size * tile_size;
final int tilesY = num_tiles/tilesX;
final double [][] pXpYD_pixel = new double [num_pixels][];
final int tilesXE = tilesX+1;
final int tilesYE = tilesY+1;
final int num_tiles_ext = tilesXE*tilesYE;
final int dbg_tile = -(20+46*80);
final Thread[] threads = ImageDtt.newThreadArray(QuadCLT.THREADS_MAX);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
TileNeibs tn = new TileNeibs(tilesX,tilesY);
double [][] pXpYD_neibs = new double[TileNeibs.DIRS+1][];
for (int ntile = ai.getAndIncrement(); ntile < num_tiles; ntile = ai.getAndIncrement()) if (pXpYD_tile[ntile] != null){
if (ntile== dbg_tile) {
System.out.println("interpolatePxPyDBilinear(): nTile="+ntile);
}
Arrays.fill(pXpYD_neibs, null);
pXpYD_neibs[8] = pXpYD_tile[ntile];
int tileX = ntile % tilesX;
int tileY = ntile / tilesX;
int num_defined = 0;
int num_ortho = 0;
for (int dir = 0; dir< TileNeibs.DIRS; dir++) {
int ntile1 = tn.getNeibIndex(ntile, dir);
if ((ntile1 >= 0) && (pXpYD_tile[ntile1] != null)) {
pXpYD_neibs[dir] = pXpYD_tile[ntile1];
num_defined++;
if ((dir & 1)==0) num_ortho++;
}
}
// extrapolate to fill missing
if (num_defined < TileNeibs.DIRS) {
for (int dir = 0; dir< TileNeibs.DIRS; dir++) if ((pXpYD_neibs[dir] == null) && (pXpYD_neibs[(dir + 4) % 8] != null)){
double [] pXpYD_opposite = pXpYD_neibs[(dir + 4) % 8];
num_defined++;
if ((dir & 1)==0) num_ortho++;
pXpYD_neibs[dir] = new double [PXPYD_LEN];
for (int i = 0; i < PXPYD_LEN; i++) {
pXpYD_neibs[dir][i] = 2 * pXpYD_neibs[8][i] - pXpYD_opposite[i];
}
}
}
// if any ortho neighbors are still not filled - just copy the central one
if (num_ortho < 4) {
for (int dir = 0; dir< TileNeibs.DIRS; dir+=2) if (pXpYD_neibs[dir] == null) {
pXpYD_neibs[dir] = pXpYD_neibs[8].clone();
num_ortho++;
}
}
// ortho opposites always exist, only diagonals can be missing (like in the corner (x=0, y=0))
if (num_defined < TileNeibs.DIRS) { // all ortho are already filled
for (int dir = 1; dir< TileNeibs.DIRS; dir += 2) if (pXpYD_neibs[dir] == null) {
double [] pXpYD1 = pXpYD_neibs[(dir + 7) % 8];
double [] pXpYD2 = pXpYD_neibs[(dir + 1) % 8];
pXpYD_neibs[dir] = new double [PXPYD_LEN];
for (int i = 0; i < PXPYD_LEN; i++) {
pXpYD_neibs[dir][i] = pXpYD1[i] + pXpYD2[i] - pXpYD_neibs[8][i];
}
}
}
// all 8 are now filled in, can fill data in 4 squares
int xc = htile_size + tile_size * tileX;
int yc = htile_size + tile_size * tileY;
// double fx0 = 0.5 + 0.5/tile_size;
// double fy0 = 0.5 + 0.5/tile_size;
for (int dir = 0; dir < 4; dir++) {
double [] pxy00 = pXpYD_neibs[FOUR_CORNERS_Z[dir][0]];
double [] pxy01 = pXpYD_neibs[FOUR_CORNERS_Z[dir][1]];
double [] pxy10 = pXpYD_neibs[FOUR_CORNERS_Z[dir][2]];
double [] pxy11 = pXpYD_neibs[FOUR_CORNERS_Z[dir][3]];
int ix0 = xc + XY_OFFS[dir][0] * htile_size; // absolute pixel X of the top-left corner of the quadrant
int iy0 = yc + XY_OFFS[dir][1] * htile_size; // absolute pixel Y of the top-left corner of the quadrant
double fx0 = 0.5*(pix_step - XY_OFFS[dir][0]);
double fy0 = 0.5*(pix_step - XY_OFFS[dir][1]);
for (int y = 0; y < htile_size; y++) {
double fy = fy0 + y * pix_step;
int iy = iy0 + y;
for (int x = 0; x < htile_size; x++) {
int ix = ix0 + x;
int iPix = iy * width + ix;
double fx = fx0 + x * pix_step;
pXpYD_pixel[iPix] = new double [PXPYD_LEN];
for (int i = 0; i < PXPYD_LEN; i++) {
pXpYD_pixel[iPix][i] =
(1-fy)*(1-fx) * pxy00[i] +
(1-fy)*( fx) * pxy01[i] +
( fy)*(1-fx) * pxy10[i] +
( fy)*( fx) * pxy11[i];
}
}
}
}
}
}
};
}
ImageDtt.startAndJoin(threads);
return pXpYD_pixel;
}
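/**
* Average per-scene mono images pixel by pixel, ignoring NaN values (description added for clarity).
* @param data [scene][pixel] mono data, may contain NaN
* @return per-pixel average over the scenes where data is defined, NaN where no scene is defined
*/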
public static double [] averageMono(
final double [][] data) {
final int num_scenes = data.length;
final int num_pixels = data[0].length;
final double [] average_data = new double[num_pixels];
Arrays.fill(average_data, Double.NaN);
final double [] weights = new double[num_pixels];
final Thread[] threads = ImageDtt.newThreadArray(QuadCLT.THREADS_MAX);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nPix = ai.getAndIncrement(); nPix < num_pixels; nPix = ai.getAndIncrement()) {
double swd = 0, sw = 0;
for (int nscene = 0; nscene < num_scenes; nscene++) {
double d = data[nscene][nPix];
if (!Double.isNaN(d)) {
swd += d;
sw += 1;
}
}
if (sw > 0) {
average_data[nPix] = swd/sw;
}
}
}
};
}
ImageDtt.startAndJoin(threads);
return average_data;
}
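/**
* Project the reference disparity map into each scene: calculate per-tile {pX, pY, disparity} for
* every selected scene, using the scene poses stored in the reference scene ERS correction
* (description added for clarity).
* @param reference_xyz offset of the reference camera {x,y,z}, or null for no offset
* @param reference_atr offset of the reference camera orientation
* @param ref_disparity per-tile reference disparity
* @param quadCLTs scene sequence
* @param scene_selection null or a boolean selection of the same length as quadCLTs
* @param ref_index index of the reference scene
* @param debugLevel debug level
* @return [scene][tile]{pX, pY, disparity}, null for skipped scenes or scenes without pose data
*/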
public static double [][][] getPxPyDs(
/// CLTParameters clt_parameters,
// boolean mb_en,
// double mb_max_gain,
// Rectangle fov_tiles,
double [] reference_xyz, // offset reference camera {x,y,z}
double [] reference_atr,
double [] ref_disparity,
QuadCLT [] quadCLTs,
boolean [] scene_selection, // null or same length as quadCLTs
int ref_index,
int debugLevel){
// boolean mb_en = clt_parameters.imp.mb_en; // && (fov_tiles==null) && (mode3d > 0);
// double mb_tau = clt_parameters.imp.mb_tau; // 0.008; // time constant, sec
// double mb_max_gain = clt_parameters.imp.mb_max_gain; // 5.0; // motion blur maximal gain (if more - move second point more than a pixel
ErsCorrection ers_reference = quadCLTs[ref_index].getErsCorrection();
int dbg_scene = -95;
double [][][] pXpYD = new double [quadCLTs.length][][];
for (int nscene = 0; nscene < quadCLTs.length ; nscene++) if ((quadCLTs[nscene] != null) && ((scene_selection==null) || scene_selection[nscene])){
if (nscene== dbg_scene) {
System.out.println("renderSceneSequence(): nscene = "+nscene);
}
String ts = quadCLTs[nscene].getImageName();
double [] scene_xyz = OpticalFlow.ZERO3;
double [] scene_atr = OpticalFlow.ZERO3;
if (nscene != ref_index) { // Check even for raw, so video frames will match in all modes
scene_xyz = ers_reference.getSceneXYZ(ts);
scene_atr = ers_reference.getSceneATR(ts);
if ((scene_atr==null) || (scene_xyz == null)) {
continue;
}
double [] scene_ers_xyz_dt = ers_reference.getSceneErsXYZ_dt(ts);
double [] scene_ers_atr_dt = ers_reference.getSceneErsATR_dt(ts);
quadCLTs[nscene].getErsCorrection().setErsDt(
scene_ers_xyz_dt, // double [] ers_xyz_dt,
scene_ers_atr_dt); // double [] ers_atr_dt)(ers_scene_original_xyz_dt);
}
if (reference_xyz != null) { // offset all, including reference scene
double [][] combo_xyzatr = ErsCorrection.combineXYZATR(
reference_xyz, // double [] reference_xyz,
reference_atr, // double [] reference_atr,
scene_xyz, // double [] scene_xyz,
scene_atr); // double [] scene_atr)
scene_xyz = combo_xyzatr[0];
scene_atr = combo_xyzatr[1];
}
// double [][] dxyzatr_dt = null;
// should get velocities from HashMap at reference scene from timestamp , not re-calculate.
// if (mb_en) {
// dxyzatr_dt = new double[][] { // for all, including ref
// quadCLTs[nscene].getErsCorrection().getErsXYZ_dt(),
// quadCLTs[nscene].getErsCorrection().getErsATR_dt()};
// }
// No use of ERS !
pXpYD[nscene] = QuadCLT.getScenePxPyD(
null, // final Rectangle full_woi_in, // show larger than sensor WOI in tiles (or null)
ref_disparity, // double [] disparity_ref,
scene_xyz, // final double [] scene_xyz, // camera center in world coordinates
scene_atr, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[nscene], // final QuadCLT scene,
quadCLTs[ref_index]); // final QuadCLT ref_scene, // now - may be null - for testing if scene is rotated ref
} // for (int nscene
return pXpYD;
}
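/**
* Subtract the reference scene pXpYD from that of each scene (description added for clarity).
* @param pXpYD [scene][tile]{pX, pY, disparity}, e.g. as returned by getPxPyDs()
* @param ref_index index of the reference scene in the first dimension
* @return per-scene, per-tile differences relative to the reference scene, null where undefined
*/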
public static double [][][] diffPxPyDs(
final double [][][] pXpYD,
final int ref_index){
final int num_scenes = pXpYD.length;
final int num_pixels = pXpYD[0].length;
final double [][][] diff_pXpYD = new double [num_scenes][num_pixels][];
final Thread[] threads = ImageDtt.newThreadArray(QuadCLT.THREADS_MAX);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nPix = ai.getAndIncrement(); nPix < num_pixels; nPix = ai.getAndIncrement()) if (pXpYD[ref_index][nPix] != null){ // probably always
int num_comp = pXpYD[ref_index][nPix].length;
for (int nscene =0; nscene < num_scenes; nscene++) if ((pXpYD[nscene] != null) && (pXpYD[nscene][nPix] != null)){
diff_pXpYD[nscene][nPix] = new double [num_comp];
for (int i = 0; i < num_comp; i++) {
diff_pXpYD[nscene][nPix][i] = pXpYD[nscene][nPix][i] - pXpYD[ref_index][nPix][i];
}
}
}
}
};
}
ImageDtt.startAndJoin(threads);
return diff_pXpYD;
}
public static double [][][] renderDouble(
CLTParameters clt_parameters,
boolean mb_en,
double mb_max_gain,
// Rectangle fov_tiles,
double [] reference_xyz, // offset reference camera {x,y,z}
double [] reference_atr,
double [] ref_disparity,
QuadCLT [] quadCLTs,
boolean [] scene_selection, // null or same length as quadCLTs
int ref_index,
double [][][] pXpYD,
int debugLevel){
// boolean mb_en = clt_parameters.imp.mb_en; // && (fov_tiles==null) && (mode3d > 0);
double mb_tau = clt_parameters.imp.mb_tau; // 0.008; // time constant, sec
// double mb_max_gain = clt_parameters.imp.mb_max_gain; // 5.0; // motion blur maximal gain (if more - move second point more than a pixel
ErsCorrection ers_reference = quadCLTs[ref_index].getErsCorrection();
int dbg_scene = -95;
double [][][] double_render = new double[quadCLTs.length][][];
double [][] ref_pXpYD = OpticalFlow.transformToScenePxPyD( // now should work with offset ref_scene
null, // fov_tiles, // final Rectangle [] extra_woi, // show larger than sensor WOI (or null)
ref_disparity, // final double [] disparity_ref, // invalid tiles - NaN in disparity
OpticalFlow.ZERO3, // final double [] scene_xyz, // camera center in world coordinates
OpticalFlow.ZERO3, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[ref_index], // final QuadCLT scene_QuadClt,
quadCLTs[ref_index], // final QuadCLT reference_QuadClt, // now - may be null - for testing if scene is rotated ref
QuadCLT.THREADS_MAX); // int threadsMax)
for (int nscene = 0; nscene < quadCLTs.length ; nscene++) if ((quadCLTs[nscene] != null) && ((scene_selection==null) || scene_selection[nscene])){
if (nscene== dbg_scene) {
System.out.println("renderSceneSequence(): nscene = "+nscene);
}
String ts = quadCLTs[nscene].getImageName();
double [] scene_xyz = OpticalFlow.ZERO3;
double [] scene_atr = OpticalFlow.ZERO3;
if (nscene != ref_index) { // Check even for raw, so video frames will match in all modes
scene_xyz = ers_reference.getSceneXYZ(ts);
scene_atr = ers_reference.getSceneATR(ts);
if ((scene_atr==null) || (scene_xyz == null)) {
continue;
}
double [] scene_ers_xyz_dt = ers_reference.getSceneErsXYZ_dt(ts);
double [] scene_ers_atr_dt = ers_reference.getSceneErsATR_dt(ts);
quadCLTs[nscene].getErsCorrection().setErsDt(
scene_ers_xyz_dt, // double [] ers_xyz_dt,
scene_ers_atr_dt); // double [] ers_atr_dt)(ers_scene_original_xyz_dt);
}
if (reference_xyz != null) { // offset all, including reference scene
double [][] combo_xyzatr = ErsCorrection.combineXYZATR(
reference_xyz, // double [] reference_xyz,
reference_atr, // double [] reference_atr,
scene_xyz, // double [] scene_xyz,
scene_atr); // double [] scene_atr)
scene_xyz = combo_xyzatr[0];
scene_atr = combo_xyzatr[1];
}
double [][] dxyzatr_dt = null;
// should get velocities from HashMap at reference scene from timestamp , not re-calculate.
if (mb_en) {
dxyzatr_dt = new double[][] { // for all, including ref
quadCLTs[nscene].getErsCorrection().getErsXYZ_dt(),
quadCLTs[nscene].getErsCorrection().getErsATR_dt()};
}
double [][] motion_blur = null;
if (mb_en && (dxyzatr_dt != null)) {
motion_blur = OpticalFlow.getMotionBlur(
quadCLTs[ref_index], // QuadCLT ref_scene,
quadCLTs[nscene], // QuadCLT scene, // can be the same as ref_scene
ref_pXpYD, // double [][] ref_pXpYD, // here it is scene, not reference!
scene_xyz, // double [] camera_xyz,
scene_atr, // double [] camera_atr,
dxyzatr_dt[0], // double [] camera_xyz_dt,
dxyzatr_dt[1], // double [] camera_atr_dt,
0, // int shrink_gaps, // will fill gaps, but not more than grow by this
debugLevel); // int debug_level)
}
double_render[nscene] = QuadCLT.renderDoubleGPUFromDSI(
-1, // final int sensor_mask,
null, // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
clt_parameters, // CLTParameters clt_parameters,
ref_disparity, // double [] disparity_ref,
// motion blur compensation
mb_tau, // double mb_tau, // 0.008; // time constant, sec
mb_max_gain, // double mb_max_gain, // 5.0; // motion blur maximal gain (if more - move second point more than a pixel
motion_blur, // double [][] mb_vectors, //
// scene_xyz, // final double [] scene_xyz, // camera center in world coordinates
// scene_atr, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[nscene], // final QuadCLT scene,
quadCLTs[ref_index], // final QuadCLT ref_scene, // now - may be null - for testing if scene is rotated ref
clt_parameters.imp.show_mono_nan, //final boolean show_nan,
pXpYD[nscene], // double [][] pXpYD,
debugLevel); // int debugLevel)
} // for (int nscene
return double_render;
}
}
...@@ -60,6 +60,7 @@ import com.elphel.imagej.ims.Did_ins_2;
import com.elphel.imagej.ims.Did_pimu;
import com.elphel.imagej.ims.Imx5;
import com.elphel.imagej.jp4.JP46_Reader_camera;
import com.elphel.imagej.orthomosaic.VegetationModel;
import com.elphel.imagej.tileprocessor.sfm.StructureFromMotion;
import Jama.Matrix;
...@@ -5685,6 +5686,23 @@ public class OpticalFlow {
// generates 3-d modes, colors, stereos, tiffs/videos
// Testing vegetation: for debugging, assuming the terrain layer is already set in *-INTER-INTRA-LMA.tiff - normally it is only set during 3d model generation
boolean test_vegetation = true;
if (test_vegetation) { // limit start of the quadCLTs by reading start/end from the reference scene
VegetationModel.test_vegetation(
clt_parameters, // CLTParameters clt_parameters,
quadCLTs, // QuadCLT [] quadCLTs,
ref_index, // int ref_index,
debugLevel); // int debugLevel)
if (videos != null) {
videos[0] = new String[0];
}
// temporarily - exiting now
return quadCLTs[ref_index].getX3dTopDirectory();
}
if (generate_mapped || reuse_video) { // modifies combo_dsn_final ?
int tilesX = quadCLTs[ref_index].getTileProcessor().getTilesX();
int tilesY = quadCLTs[ref_index].getTileProcessor().getTilesY();
...
...@@ -1471,6 +1471,172 @@ public class QuadCLT extends QuadCLTCPU {
return imp_render;
}
public static double [][] getScenePxPyD(
final Rectangle full_woi_in, // show larger than sensor WOI in tiles (or null)
double [] disparity_ref,
double [] scene_xyz, // camera center in world coordinates. If null - no shift, no ers
double [] scene_atr, // camera orientation relative to world frame
final QuadCLT scene,
final QuadCLT ref_scene){ // now - may be null - for testing if scene is rotated ref
double [][] pXpYD;
if ((scene_xyz == null) || (scene_atr == null)) {
scene_xyz = new double[3];
scene_atr = new double[3];
pXpYD=OpticalFlow.transformToScenePxPyD( // now should work with offset ref_scene
full_woi_in, // final Rectangle [] extra_woi, // show larger than sensor WOI (or null)
disparity_ref, // final double [] disparity_ref, // invalid tiles - NaN in disparity
scene_xyz, // final double [] scene_xyz, // camera center in world coordinates
scene_atr, // final double [] scene_atr, // camera orientation relative to world frame
ref_scene, // final QuadCLT scene_QuadClt,
ref_scene, // final QuadCLT reference_QuadClt, // now - may be null - for testing if scene is rotated ref
THREADS_MAX); // int threadsMax)
} else {
pXpYD=OpticalFlow.transformToScenePxPyD( // now should work with offset ref_scene
full_woi_in, // final Rectangle [] extra_woi, // show larger than sensor WOI (or null)
disparity_ref, // final double [] disparity_ref, // invalid tiles - NaN in disparity
scene_xyz, // final double [] scene_xyz, // camera center in world coordinates
scene_atr, // final double [] scene_atr, // camera orientation relative to world frame
scene, // final QuadCLT scene_QuadClt,
ref_scene, // final QuadCLT reference_QuadClt, // now - may be null - for testing if scene is rotated ref
THREADS_MAX); // int threadsMax)
}
return pXpYD;
}
public static double [][] renderDoubleGPUFromDSI( // {scene}{color}{pixel}
final int sensor_mask,
final Rectangle full_woi_in, // show larger than sensor WOI in tiles (or null)
CLTParameters clt_parameters,
double [] disparity_ref,
// motion blur compensation
double mb_tau, // 0.008; // time constant, sec
double mb_max_gain, // 5.0; // motion blur maximal gain (if more - move second point more than a pixel
double [][] mb_vectors, // now [2][ntiles];
final QuadCLT scene,
final QuadCLT ref_scene, // now - may be null - for testing if scene is rotated ref
final boolean show_nan,
double [][] pXpYD,
final int debugLevel){
int rendered_width = scene.getErsCorrection().getSensorWH()[0];
if (full_woi_in != null) {
rendered_width = full_woi_in.width * GPUTileProcessor.DTT_SIZE;
}
boolean showPxPyD = false;
if (showPxPyD) {
int dbg_width = rendered_width/GPUTileProcessor.DTT_SIZE;
int dbg_height = pXpYD.length/dbg_width;
double [][] dbg_img = new double [3 + ((mb_vectors!=null)? 2:0)][pXpYD.length];
String [] dbg_titles = (mb_vectors!=null)?
(new String[] {"pX","pY","Disparity","mb_X","mb_Y"}):
(new String[] {"pX","pY","Disparity"});
for (int i = 0; i < dbg_img.length; i++) {
Arrays.fill(dbg_img[i], Double.NaN);
}
for (int nTile = 0; nTile < pXpYD.length; nTile++){
if (pXpYD[nTile] != null) {
for (int i = 0; i < pXpYD[nTile].length; i++) {
dbg_img[i][nTile] = pXpYD[nTile][i];
}
}
if (mb_vectors!=null) {
for (int i = 0; i <2; i++) {
dbg_img[3 + i][nTile] = mb_tau * mb_vectors[i][nTile];
}
}
}
ShowDoubleFloatArrays.showArrays( // out of boundary 15
dbg_img,
dbg_width,
dbg_height,
true,
scene.getImageName()+"-pXpYD",
dbg_titles);
}
TpTask[][] tp_tasks;
if (mb_vectors!=null) {
tp_tasks = GpuQuad.setInterTasksMotionBlur( // "true" reference, with stereo actual reference will be offset
scene.getNumSensors(),
rendered_width, // should match output size, pXpYD.length
!scene.hasGPU(), // final boolean calcPortsCoordinatesAndDerivatives, // GPU can calculate them centreXY
pXpYD, // final double [][] pXpYD, // per-tile array of pX,pY,disparity triplets (or nulls)
null, // final boolean [] selection, // may be null, if not null do not process unselected tiles
// motion blur compensation
mb_tau, // final double mb_tau, // 0.008; // time constant, sec
mb_max_gain, // final double mb_max_gain, // 5.0; // motion blur maximal gain (if more - move second point more than a pixel
mb_vectors, //final double [][] mb_vectors, //
scene.getErsCorrection(), // final GeometryCorrection geometryCorrection,
clt_parameters.imp.disparity_corr, // 04/07/2023 //0.0, // final double disparity_corr,
-1, // 0, // margin, // final int margin, // do not use tiles if their centers are closer to the edges
null, // final boolean [] valid_tiles,
THREADS_MAX); // final int threadsMax) // maximal number of threads to launch
} else {
tp_tasks = new TpTask[1][];
tp_tasks[0] = GpuQuad.setInterTasks( // "true" reference, with stereo actual reference will be offset
scene.getNumSensors(),
rendered_width, // should match output size, pXpYD.length
!scene.hasGPU(), // final boolean calcPortsCoordinatesAndDerivatives, // GPU can calculate them centreXY
pXpYD, // final double [][] pXpYD, // per-tile array of pX,pY,disparity triplets (or nulls)
null, // final boolean [] selection, // may be null, if not null do not process unselected tiles
scene.getErsCorrection(), // final GeometryCorrection geometryCorrection,
clt_parameters.imp.disparity_corr, // 04/07/2023 // 0.0, // final double disparity_corr,
-1, // 0, // margin, // final int margin, // do not use tiles if their centers are closer to the edges
null, // final boolean [] valid_tiles,
THREADS_MAX); // final int threadsMax) // maximal number of threads to launch
}
scene.saveQuadClt(); // to re-load new set of Bayer images to the GPU (do nothing for CPU) and Geometry
ImageDtt image_dtt = new ImageDtt(
scene.getNumSensors(),
clt_parameters.transform_size,
clt_parameters.img_dtt,
scene.isAux(),
scene.isMonochrome(),
scene.isLwir(),
clt_parameters.getScaleStrength(scene.isAux()),
scene.getGPU());
boolean use_reference = false;
int [] wh = (full_woi_in == null)? null: new int[]{
full_woi_in.width * GPUTileProcessor.DTT_SIZE,
full_woi_in.height * GPUTileProcessor.DTT_SIZE};
int erase_clt = show_nan ? 1:0;
// boolean test1 = true;
if (mb_vectors!=null) {// && test1) {
image_dtt.setReferenceTDMotionBlur( // change to main?
erase_clt, //final int erase_clt,
wh, // null, // final int [] wh, // null (use sensor dimensions) or pair {width, height} in pixels
clt_parameters.img_dtt, // final ImageDttParameters imgdtt_params, // Now just extra correlation parameters, later will include, most others
use_reference, // true, // final boolean use_reference_buffer,
tp_tasks, // final TpTask[] tp_tasks,
clt_parameters.gpu_sigma_r, // final double gpu_sigma_r, // 0.9, 1.1
clt_parameters.gpu_sigma_b, // final double gpu_sigma_b, // 0.9, 1.1
clt_parameters.gpu_sigma_g, // final double gpu_sigma_g, // 0.6, 0.7
clt_parameters.gpu_sigma_m, // final double gpu_sigma_m, // = 0.4; // 0.7;
THREADS_MAX, // final int threadsMax, // maximal number of threads to launch
debugLevel); // final int globalDebugLevel);
} else {
image_dtt.setReferenceTD( // change to main?
erase_clt, //final int erase_clt,
wh, // null, // final int [] wh, // null (use sensor dimensions) or pair {width, height} in pixels
clt_parameters.img_dtt, // final ImageDttParameters imgdtt_params, // Now just extra correlation parameters, later will include, most others
use_reference, // true, // final boolean use_reference_buffer,
tp_tasks[0], // final TpTask[] tp_tasks,
clt_parameters.gpu_sigma_r, // final double gpu_sigma_r, // 0.9, 1.1
clt_parameters.gpu_sigma_b, // final double gpu_sigma_b, // 0.9, 1.1
clt_parameters.gpu_sigma_g, // final double gpu_sigma_g, // 0.6, 0.7
clt_parameters.gpu_sigma_m, // final double gpu_sigma_m, // = 0.4; // 0.7;
THREADS_MAX, // final int threadsMax, // maximal number of threads to launch
debugLevel); // final int globalDebugLevel);
}
double [][] double_render = scene.renderDoubleFromTD (
sensor_mask, // final int sensor_mask,
wh, // null, // int [] wh,
use_reference); // boolean use_reference
return double_render;
}
/**
* Prepare 16x16 texture tiles using GPU from disparity reference. Includes motion blur correction
* Does not use scene.saveQuadClt() to re-load new set of Bayer images to the GPU -- should be done by caller
...@@ -2264,6 +2430,58 @@ public class QuadCLT extends QuadCLTCPU {
return imp_stack;
}
/**
* Render a single scene, combining all sensors, as a double array [color_chn][pixel]
* @param sensor_mask sensors to use (normally -1 - all)
* @param wh null or {width, height}
* @param use_reference use the GPU reference buffer (rather than the scene buffer)
* @return rendered image as [color_chn][pixel]
*/
public double [][] renderDoubleFromTD ( // [scene][color][pixel]
int sensor_mask,
// CLTParameters clt_parameters,
int [] wh, // may be null, or {width, height}
boolean use_reference
) {
gpuQuad.execImcltRbgAll(
isMonochrome(),
use_reference,
wh); //int [] wh
// get data back from GPU
final float [][][] iclt_fimg = new float [getNumSensors()][][];
int nchn = 0;
int ncol = 0;
int nTiles = 0;
for (int ncam = 0; ncam < iclt_fimg.length; ncam++) if (((1 << ncam) & sensor_mask) != 0){
iclt_fimg[ncam] = gpuQuad.getRBG(ncam); // updated window
ncol = iclt_fimg[ncam].length;
nTiles = iclt_fimg[ncam][0].length;
nchn++;
}
final double scale = 1.0 / nchn;
final double [][] iclt_fimg_combo = new double [ncol][nTiles];
final Thread[] threads = ImageDtt.newThreadArray(THREADS_MAX);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nTile = ai.getAndIncrement(); nTile < iclt_fimg_combo[0].length; nTile = ai.getAndIncrement()) {
for (int ncol = 0; ncol < iclt_fimg_combo.length; ncol++) {
double d = 0;
for (int i = 0; i < iclt_fimg.length; i++) if (iclt_fimg[i] != null) {
d+=iclt_fimg[i][ncol][nTile];
}
iclt_fimg_combo[ncol][nTile] = d * scale; // (float) (d * scale);
}
}
}
};
}
ImageDtt.startAndJoin(threads);
return iclt_fimg_combo;
}
...
...@@ -3044,96 +3044,6 @@ public class TexturedModel {
}
/*
if ((smooth_ground != null) && !ct_mode) { // no sense to use with CT? or compare with the common only?
if (has_sfm && gsmth_sfm_deviate) {
mixed_flat = true;
// Change!
// Use FG for FG, and ground plane as BG?
TileNeibs tn = new TileNeibs(tilesX, smooth_ground.length/tilesX);
for (int ntile = 0; ntile < smooth_ground.length; ntile++) {
double disp = (ss_fg_bg[1][ntile] > ss_fg_bg[0][ntile]) ? ds_fg_bg[1][ntile] : ds_fg_bg[1][ntile]; // strongest
if (disp > lim_disparity) {
disp = lim_disparity;
}
double str = Math.max(ss_fg_bg[1][ntile], ss_fg_bg[0][ntile]);
boolean strong = false;
if (disp > smooth_ground[ntile]) {
strong = str >= gsmth_str_bump;
} else {
strong = str >= gsmth_str_dip;
}
if (strong) {
check_neibs:
{
for (int dy = -gsmth_sfm_rad; dy <= gsmth_sfm_rad; dy++) {
for (int dx = -gsmth_sfm_rad; dx <= gsmth_sfm_rad; dx++) {
int ntile1 = tn.getNeibIndex(ntile, dx, dy);
if ((ntile1 < 0) || (sfm_gain[ntile1] < gsmth_sfm_gain)) {
strong = false;
break check_neibs;
}
}
}
if (disp > smooth_ground[ntile]) {
ds_fg_bg[0][ntile] = disp;
ss_fg_bg[0][ntile] = str;
ds_fg_bg[1][ntile] = smooth_ground[ntile];
ss_fg_bg[1][ntile] = small_strength;
} else {
ds_fg_bg[0][ntile] = Double.NaN;
ss_fg_bg[0][ntile] = 0.0;
smooth_ground[ntile] = disp;
ds_fg_bg[1][ntile] = smooth_ground[ntile];
ss_fg_bg[1][ntile] = str;
}
}
}
if (!strong) { // either was weak or broke check_neibs
// if (ct_mode) {
// ds_fg_bg[0][ntile] = lim_disparity;
// ss_fg_bg[0][ntile] = str;
// ds_fg_bg[1][ntile] = smooth_ground[ntile];
// ss_fg_bg[1][ntile] = small_strength;
// } else {
ds_fg_bg[0][ntile] = Double.NaN;
ds_fg_bg[1][ntile] = smooth_ground[ntile];
ss_fg_bg[0][ntile] = 0.0;
ss_fg_bg[1][ntile] = str;
// }
}
}
} else { // smooth surface only: nothing in FG, surface - in BG
if (!ct_mode) { // no sense to use smooth_ground in CT scan mode
for (int i = 0; i < smooth_ground.length; i++) {
ds_fg_bg[1][i] = smooth_ground[i];
ss_fg_bg[1][i] = Math.max(ss_fg_bg[1][i],ss_fg_bg[0][i]);
ss_fg_bg[1][i] = Math.max(ss_fg_bg[1][i],small_strength);
ds_fg_bg[0][i] = Double.NaN;
ss_fg_bg[0][i] = 0.0;
}
}
}
if (show_bs_debug && (debugLevel>1)) {
String [] dbg_titles = {"FGC","BGC","FG", "BG", "GND", "FS", "BS"};
double [][] dbg_img = {ds_fg_bg_common[0], ds_fg_bg_common[1], ds_fg_bg[0], ds_fg_bg[1], smooth_ground, ss_fg_bg_common[0], ss_fg_bg_common[1]}; // here BG==FG
ShowDoubleFloatArrays.showArrays(
dbg_img,
tilesX,
dbg_img[0].length/tilesX,
true,
ref_scene.getImageName()+"-fgc-bgc-fg-bg-disparity_strength-LMA-GND",
dbg_titles);
}
}
*/
if (show_bs_debug) {
String [] dbg_titles = {"FGC","BGC","FG", "BG", "GND", "FS", "BS"};
double [][] dbg_img = {ds_fg_bg_common[0], ds_fg_bg_common[1], ds_fg_bg[0], ds_fg_bg[1], smooth_ground, ss_fg_bg_common[0], ss_fg_bg_common[1]}; // here BG==FG
...@@ -3248,10 +3158,9 @@ ...
min_trim_disparity, // final double min_trim_disparity, // 2.0; // do not try to trim texture outlines with lower disparities
lin_text0, // final double [][][][] lin_textures, // null or [1][][][] to return non-normalized textures
(ct_scans != null), // ct_mode, // final boolean ct_mode,
-terrain_mode, // final boolean terrain_mode, Just for file name
+terrain_mode, // final boolean terrain_mode, just for file name
debugLevel); // final int debug_level)
if (ct_scans != null) {
// ct_scans[nscan] = new double [][] {faded_textures[0][0].clone(),faded_textures[1][0].clone()}; // both slices, channel 0
ct_scans[nscan] = new double [faded_textures.length][];
for (int i = 0; i < ct_scans[nscan].length; i++) {
ct_scans[nscan][i] = faded_textures[i][0].clone(); // channel 0 from each slice
...@@ -3266,15 +3175,6 @@ ...
titles[nscan] = String.format("D=%.3f", ct_min+ct_step*nscan);
}
titles[titles.length-1]="full";
/*
ShowDoubleFloatArrays.showArrays(
ct_disparity,
tilesX,
ct_disparity[0].length/tilesX,
true,
ref_scene.getImageName()+"-CT_scan_disparities",
titles);
*/
double ct_max = ct_min+ct_step* (ct_scans.length-1);
String suffix = String.format("-CT_SCAN_%.3f_%.3f_%.3f",ct_min,ct_max,ct_step);
scenes[ref_index].saveDoubleArrayInModelDirectory(
...
...@@ -8660,6 +8660,9 @@ if (debugLevel > -100) return true; // temporarily !
}
first_in_series = false; // at least once success in this series
System.out.println("adding to video_sets_list start="+start_ref_pointers[0]+", end="+ref_index); // start_ref_pointers[1]);
// if ((video_list != null) && (video_list.length>0) && (video_list[0] == null)) {
// video_list[0]= new String[0];
// }
for (int i= 0; i < video_list[0].length; i++) {
System.out.println(i+": "+video_list[0][i]+", "+widths_list[0][i]);
}
...