Commit 77684393 authored by Andrey Filippov

implemented synthetic stereo video generation

parent 7c62f986
@@ -2227,6 +2227,9 @@ public class EyesisCorrectionParameters {
null, // filter
this.x3dDirectory); //this.sourceDirectory);
if (dir!=null) {
while (dir.endsWith(Prefs.getFileSeparator())) {
dir = dir.substring(0, dir.length()-1);
}
this.x3dDirectory=dir;
if (this.use_x3d_subdirs && (name != null) && !name.equals("")) {
// name = this.x3dDirectory + Prefs.getFileSeparator(); // +this.x3dSubdirPrefix + name + this.x3dSubdirSuffix;
@@ -2243,6 +2246,9 @@ public class EyesisCorrectionParameters {
name); //this.x3dDirectory + Prefs.getFileSeparator()+name); //this.sourceDirectory);
}
}
while (dir.endsWith(Prefs.getFileSeparator())) {
dir = dir.substring(0, dir.length()-1);
}
return dir;
}
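// Minimal illustration of the trailing-separator stripping above (assuming "/" is the
// platform separator): a hypothetical "/data/models/x3d///" becomes "/data/models/x3d",
// so a single Prefs.getFileSeparator() can later be appended without doubling it.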
@@ -51,6 +51,9 @@ public class IntersceneMatchParameters {
public boolean generate_mapped = true;
public boolean generate_stereo = false;
public double stereo_baseline = 500.0;// mm
public boolean stereo_merge = true;
public int stereo_gap = 30; // pixels between right and left frames
public int extra_hor_tile = 15;
public int extra_vert_tile = 10;
public boolean crop_3d = true; // do not show the extra area of FG/BG views (currently only the ref scene has disparity)
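// Hedged summary of the new stereo parameters above: stereo_baseline is the synthetic
// inter-camera distance in millimeters (a 0.001 factor converts it to meters downstream),
// stereo_merge packs the right and left renders into one wide frame separated by
// stereo_gap pixels, and extra_hor_tile/extra_vert_tile enlarge the reference window by
// that many tiles in each direction so offset scenes still fit inside it.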
@@ -74,7 +77,9 @@ public class IntersceneMatchParameters {
public boolean remove_avi = true; // remove avi after conversion to webm
public boolean um_mono = true; // applies to both TIFF and AVI
public double um_sigma = 10;
public double um_weight = 0.9; //
public double um_weight = 0.97; //
public boolean mono_fixed = true; // normalize to fixed range when converting to 8 bits
public double mono_range = 500.0; // monochrome full-scale range (+/- half)
public boolean annotate_color = true; // annotate pseudo-color video frames with timestamps
public boolean annotate_mono = true; // annotate monochrome video frames with timestamps
public Color annotate_color_color = new Color( 255, 255, 255); // greenish over "fire"
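// Sketch of the unsharp-mask weight (assuming a Gaussian blur with sigma = um_sigma):
//   um_result = original - um_weight * blurred(original)
// so raising um_weight from 0.9 to 0.97 suppresses more of the low-frequency background.
// With mono_fixed set, the UM result is later mapped from +/- mono_range/2 to the 8-bit
// range instead of auto-scaling each sequence.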
@@ -306,7 +311,16 @@ public class IntersceneMatchParameters {
gd.addMessage ("Generate/show scene sequences");
gd.addCheckbox ("Generate mapped scene sequence", this.generate_mapped,
"Generate scene sequence mapped to the reference scene");
"Generate scene sequence mapped to the reference scene.");
gd.addCheckbox ("Generate right/left pairs", this.generate_stereo,
"Generate stereo-pairs for 3D-corrected videos (FG,BG).");
gd.addNumericField("Stereo baseline", this.stereo_baseline, 5,7,"mm",
"Synthetic 3D with possibly exagerrated stereo baseline");
gd.addCheckbox ("Stereo merge right, left", this.stereo_merge,
"Combine stereo pair in a single (wide) frame. Unchecked - generate separate videos.");
gd.addNumericField("Stereo gap", this.stereo_gap, 0,3,"pix",
"Distance (pixels) between right and left sub-images in a stereo frame.");
gd.addNumericField("Scene sequence horizontal extra", this.extra_hor_tile, 0,3,"tiles",
"Enlarge reference scene window horizontally in each direction to accommodate other scenes in a sequence");
gd.addNumericField("Scene sequence vertical extra", this.extra_vert_tile, 0,3,"tiles",
@@ -319,7 +333,7 @@ public class IntersceneMatchParameters {
gd.addCheckbox ("Merge all channels in 3D modes", this.merge_all,
"Ignore sensor mask, use all channels and merge them into one in 3D modes (FG and BG)");
gd. addChoice("3D mode ", MODES3D, MODES3D[this.mode3d + 1],
gd. addChoice("3D mode", MODES3D, MODES3D[this.mode3d + 1],
"3D mode for rendering scenes in a sequence: RAW - raw images, INF - no 3D, use infinity; FG - Foreground; BG - Background");
@@ -358,10 +372,17 @@ public class IntersceneMatchParameters {
gd.addCheckbox ("Apply unsharp mask to mono", this.um_mono,
"Apply unsharp mask to monochrome image sequences/video. Applies to TIFF generatiojn too");
gd.addNumericField("Unsharp mask sigma (radius)", this.um_sigma, 5,7,"pix",
"");
"Unsharp mask Gaussian sigma.");
gd.addNumericField("Unsharp mask weight", this.um_weight, 5,7,"",
"Unsharp mask weightt (multiply blurred version before subtraction from the original).");
gd.addCheckbox ("Fixed monochrome range", this.mono_fixed,
"Normalize monochrome (after UM) to a fixed range when converting to 8 bit RGB.");
gd.addNumericField("Monochrome full range", this.mono_range, 5,7,"",
"Monochrome full range to convert to 0..255.");
gd.addCheckbox ("Timestamp color videos", this.annotate_color,
"Annotate pseudo-color video frames with timestamps.");
gd.addCheckbox ("Timestamp monochrome videos", this.annotate_mono,
@@ -547,8 +568,12 @@ public class IntersceneMatchParameters {
this.diff_from_lma_neg = gd.getNextNumber();
this.outliers_lma_nth_fromextrem=(int)gd.getNextNumber();
this.filter_margin = (int) gd.getNextNumber();
this.generate_mapped = gd.getNextBoolean();
this.generate_stereo = gd.getNextBoolean();
this.stereo_baseline = gd.getNextNumber();
this.stereo_merge = gd.getNextBoolean();
this.stereo_gap = (int) gd.getNextNumber();
this.generate_mapped = gd.getNextBoolean();
this.extra_hor_tile = (int) gd.getNextNumber();
this.extra_vert_tile = (int) gd.getNextNumber();
this.crop_3d = gd.getNextBoolean();
@@ -574,7 +599,8 @@ public class IntersceneMatchParameters {
this.um_mono = gd.getNextBoolean();
this.um_sigma = gd.getNextNumber();
this.um_weight = gd.getNextNumber();
this.mono_fixed = gd.getNextBoolean();
this.mono_range = gd.getNextNumber();
this.annotate_color = gd.getNextBoolean();
this.annotate_mono = gd.getNextBoolean();
{
@@ -707,8 +733,12 @@ public class IntersceneMatchParameters {
properties.setProperty(prefix+"diff_from_lma_neg", this.diff_from_lma_neg+""); // double
properties.setProperty(prefix+"outliers_lma_nth_fromextrem", this.outliers_lma_nth_fromextrem+""); // int
properties.setProperty(prefix+"filter_margin", this.filter_margin+""); // int
properties.setProperty(prefix+"generate_mapped", this.generate_mapped+""); // boolean
properties.setProperty(prefix+"generate_stereo", this.generate_stereo+""); // boolean
properties.setProperty(prefix+"stereo_baseline", this.stereo_baseline+""); // double
properties.setProperty(prefix+"stereo_merge", this.stereo_merge+""); // boolean
properties.setProperty(prefix+"stereo_gap", this.stereo_gap+""); // int
properties.setProperty(prefix+"generate_mapped", this.generate_mapped+""); // boolean
properties.setProperty(prefix+"extra_hor_tile", this.extra_hor_tile+""); // int
properties.setProperty(prefix+"extra_vert_tile", this.extra_vert_tile+""); // int
properties.setProperty(prefix+"crop_3d", this.crop_3d+""); // boolean
@@ -733,7 +763,8 @@ public class IntersceneMatchParameters {
properties.setProperty(prefix+"um_mono", this.um_mono+""); // boolean
properties.setProperty(prefix+"um_sigma", this.um_sigma+""); // double
properties.setProperty(prefix+"um_weight", this.um_weight+""); // double
properties.setProperty(prefix+"mono_fixed", this.mono_fixed+""); // boolean
properties.setProperty(prefix+"mono_range", this.mono_range+""); // double
properties.setProperty(prefix+"annotate_color", this.annotate_color+""); // boolean
properties.setProperty(prefix+"annotate_mono", this.annotate_mono+""); // boolean
{
@@ -849,8 +880,13 @@ public class IntersceneMatchParameters {
if (properties.getProperty(prefix+"diff_from_lma_neg")!=null) this.diff_from_lma_neg=Double.parseDouble(properties.getProperty(prefix+"diff_from_lma_neg"));
if (properties.getProperty(prefix+"outliers_lma_nth_fromextrem")!=null) this.outliers_lma_nth_fromextrem=Integer.parseInt(properties.getProperty(prefix+"outliers_lma_nth_fromextrem"));
if (properties.getProperty(prefix+"filter_margin")!=null) this.filter_margin=Integer.parseInt(properties.getProperty(prefix+"filter_margin"));
if (properties.getProperty(prefix+"generate_mapped")!=null) this.generate_mapped=Boolean.parseBoolean(properties.getProperty(prefix+"generate_mapped"));
if (properties.getProperty(prefix+"generate_stereo")!=null) this.generate_stereo=Boolean.parseBoolean(properties.getProperty(prefix+"generate_stereo"));
if (properties.getProperty(prefix+"stereo_baseline")!=null) this.stereo_baseline=Double.parseDouble(properties.getProperty(prefix+"stereo_baseline"));
if (properties.getProperty(prefix+"stereo_merge")!=null) this.stereo_merge=Boolean.parseBoolean(properties.getProperty(prefix+"stereo_merge"));
if (properties.getProperty(prefix+"stereo_gap")!=null) this.stereo_gap=Integer.parseInt(properties.getProperty(prefix+"stereo_gap"));
if (properties.getProperty(prefix+"generate_mapped")!=null) this.generate_mapped=Boolean.parseBoolean(properties.getProperty(prefix+"generate_mapped"));
if (properties.getProperty(prefix+"extra_hor_tile")!=null) this.extra_hor_tile=Integer.parseInt(properties.getProperty(prefix+"extra_hor_tile"));
if (properties.getProperty(prefix+"extra_vert_tile")!=null) this.extra_vert_tile=Integer.parseInt(properties.getProperty(prefix+"extra_vert_tile"));
if (properties.getProperty(prefix+"crop_3d")!=null) this.crop_3d=Boolean.parseBoolean(properties.getProperty(prefix+"crop_3d"));
@@ -875,6 +911,8 @@ public class IntersceneMatchParameters {
if (properties.getProperty(prefix+"um_mono")!=null) this.um_mono=Boolean.parseBoolean(properties.getProperty(prefix+"um_mono"));
if (properties.getProperty(prefix+"um_sigma")!=null) this.um_sigma=Double.parseDouble(properties.getProperty(prefix+"um_sigma"));
if (properties.getProperty(prefix+"um_weight")!=null) this.um_weight=Double.parseDouble(properties.getProperty(prefix+"um_weight"));
if (properties.getProperty(prefix+"mono_fixed")!=null) this.mono_fixed=Boolean.parseBoolean(properties.getProperty(prefix+"mono_fixed"));
if (properties.getProperty(prefix+"mono_range")!=null) this.mono_range=Double.parseDouble(properties.getProperty(prefix+"mono_range"));
if (properties.getProperty(prefix+"annotate_color")!=null) this.annotate_color=Boolean.parseBoolean(properties.getProperty(prefix+"annotate_color"));
if (properties.getProperty(prefix+"annotate_mono")!=null) this.annotate_mono=Boolean.parseBoolean(properties.getProperty(prefix+"annotate_mono"));
@@ -995,8 +1033,12 @@ public class IntersceneMatchParameters {
imp.diff_from_lma_neg = this.diff_from_lma_neg;
imp.outliers_lma_nth_fromextrem = this.outliers_lma_nth_fromextrem;
imp.filter_margin = this.filter_margin;
imp.generate_mapped = this.generate_mapped;
imp.generate_mapped = this.generate_mapped;
imp.generate_stereo = this.generate_stereo;
imp.stereo_baseline = this.stereo_baseline;
imp.stereo_merge = this.stereo_merge;
imp.stereo_gap = this.stereo_gap;
imp.extra_hor_tile = this.extra_hor_tile;
imp.extra_vert_tile = this.extra_vert_tile;
imp.crop_3d = this.crop_3d;
@@ -1022,7 +1064,8 @@ public class IntersceneMatchParameters {
imp.um_mono = this. um_mono;
imp.um_sigma = this. um_sigma;
imp.um_weight = this. um_weight;
imp.mono_fixed = this. mono_fixed;
imp.mono_range = this. mono_range;
imp.annotate_color = this. annotate_color;
imp.annotate_mono = this. annotate_mono;
@@ -2780,6 +2780,9 @@ public class OpticalFlow {
}
}
}
final boolean ref_is_identity =
(scene_xyz[0]==0.0) && (scene_xyz[1]==0.0) && (scene_xyz[2]==0.0) &&
(scene_atr[0]==0.0) && (scene_atr[1]==0.0) && (scene_atr[2]==0.0);
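// ref_is_identity holds only when the reference scene itself has a zero pose. With a
// stereo offset applied the reference camera is shifted as well (see renderSceneSequence),
// so the reference scene can no longer take the shortcut below and must be reprojected
// through getImageCoordinatesERS() like any other scene.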
final double [] disparity_ref = dref;
// final int tilesX_ref = ref_w;
// final int tilesY_ref = ref_h;
@@ -2806,7 +2809,7 @@ public class OpticalFlow {
if (disparity < 0) {
disparity = 1.0* disparity; // 0.0;
}
if (scene_QuadClt == reference_QuadClt) {
if ((scene_QuadClt == reference_QuadClt) && (ref_is_identity)) {
pXpYD[nTile] = new double [] {centerX, centerY, disparity};
} else {
pXpYD[nTile] = ersReferenceCorrection.getImageCoordinatesERS( // ersCorrection - reference
@@ -3982,6 +3985,12 @@ public class OpticalFlow {
double range_max = clt_parameters.imp.range_max ; // 5000.0;
boolean generate_mapped = clt_parameters.imp.generate_mapped;
boolean generate_stereo = clt_parameters.imp.generate_stereo;
double stereo_baseline = clt_parameters.imp.stereo_baseline;
double stereo_baseline_meters = 0.001 * stereo_baseline;
boolean stereo_merge = clt_parameters.imp.stereo_merge;
int stereo_gap = clt_parameters.imp.stereo_gap;
int extra_hor_tile = clt_parameters.imp.extra_hor_tile;
int extra_vert_tile = clt_parameters.imp.extra_vert_tile;
boolean crop_3d = clt_parameters.imp.crop_3d;
@@ -4005,6 +4014,9 @@ public class OpticalFlow {
boolean um_mono = clt_parameters.imp.um_mono;
double um_sigma = clt_parameters.imp.um_sigma;
double um_weight = clt_parameters.imp.um_weight;
boolean mono_fixed = clt_parameters.imp.mono_fixed;
double mono_range = clt_parameters.imp.mono_range;
boolean annotate_color = clt_parameters.imp.annotate_color;
boolean annotate_mono = clt_parameters.imp.annotate_mono;
@@ -4438,111 +4450,185 @@ public class OpticalFlow {
if (!toRGB && um_mono) {
scenes_suffix+=String.format("-UM%.1f_%.2f",um_sigma,um_weight);
}
ImagePlus imp_scenes = renderSceneSequence(
clt_parameters, // CLTParameters clt_parameters,
fov_tiles, // Rectangle fov_tiles,
mode3d, // int mode3d,
toRGB, // boolean toRGB,
sensor_mask, // int sensor_mask,
scenes_suffix, // String suffix,
selected_disparity, // double [] ref_disparity,
quadCLTs, // QuadCLT [] quadCLTs,
debugLevel); // int debugLevel);
if (toRGB ? save_mapped_color: save_mapped_mono) {
quadCLTs[ref_index].saveImagePlusInModelDirectory(
null, // "GPU-SHIFTED-D"+clt_parameters.disparity, // String suffix,
imp_scenes); // ImagePlus imp)
}
// Save as AVI
if (toRGB ? gen_avi_color: gen_avi_mono) {
if (toRGB ? annotate_color: annotate_mono) {
// If it is mono, first convert to color
ImageConverter imageConverter = new ImageConverter(imp_scenes);
imageConverter.convertToRGB(); // Did it convert imp_scenes ?
int num_stereo = (generate_stereo && (mode3d > 0))? 2:1; // only for 3D views
boolean combine_left_right = (num_stereo > 1) && stereo_merge;
ImagePlus [] imp_scenes_pair = new ImagePlus[num_stereo];
String scenes_suffix_pair = scenes_suffix;
for (int nstereo = 0; nstereo < num_stereo; nstereo++) {
double [] xyz_offset = {
-stereo_baseline_meters * (nstereo - 0.5) * (num_stereo - 1), // x offset
0.0, // Y offset
0.0}; // Z offset
if (num_stereo > 1) {
// scenes_suffix = scenes_suffix_pair + ((nstereo > 0)?(combine_left_right?"-STEREO":"-RIGHT"):"-LEFT"); // check if opposite
scenes_suffix = scenes_suffix_pair + ((nstereo > 0)?"-RIGHT":"-LEFT"); // check if opposite
scenes_suffix += stereo_baseline;
}
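// Worked example (assuming the default stereo_baseline = 500 mm, so
// stereo_baseline_meters = 0.5): with num_stereo == 2 the X offsets are
//   nstereo = 0 ("-LEFT"):  -0.5 * (0 - 0.5) * (2 - 1) = +0.25 m
//   nstereo = 1 ("-RIGHT"): -0.5 * (1 - 0.5) * (2 - 1) = -0.25 m
// i.e. the two virtual cameras are displaced by +/- half the baseline along X; with
// num_stereo == 1 the (num_stereo - 1) factor collapses the offset to zero.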
final Color fcolor = toRGB ? annotate_color_color: annotate_color_mono;
final ImageStack fstack_scenes = imp_scenes.getImageStack();
final int width = imp_scenes.getWidth();
final int height = imp_scenes.getHeight();
final int posX= width - 119; // 521;
final int posY= height + 1; // 513;
final Font font = new Font("Monospaced", Font.PLAIN, 12);
final int nSlices = fstack_scenes.getSize();
final Thread[] threads = ImageDtt.newThreadArray(QuadCLT.THREADS_MAX);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nSlice = ai.getAndIncrement(); nSlice < nSlices; nSlice = ai.getAndIncrement()) {
String scene_title = fstack_scenes.getSliceLabel(nSlice+1);
ImageProcessor ip = fstack_scenes.getProcessor(nSlice+1);
ip.setColor(fcolor); // Color.BLUE);
ip.setFont(font);
if (toRGB) {
ip.drawString(scene_title, posX, posY,Color.BLACK);
} else {
ip.drawString(scene_title, posX, posY);
}
}
}
};
}
ImageDtt.startAndJoin(threads);
String avi_path=null;
video:
{
try {
avi_path=quadCLTs[ref_index].saveAVIInModelDirectory(
null, // "GPU-SHIFTED-D"+clt_parameters.disparity, // String suffix,
mode_avi, // int avi_mode,
avi_JPEG_quality, // int avi_JPEG_quality,
video_fps, // double fps,
imp_scenes); // ImagePlus imp)
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
break video;
}
// Convert with ffmpeg?
if (avi_path == null) {
break video;
// ImagePlus imp_scenes
imp_scenes_pair[nstereo]= renderSceneSequence(
clt_parameters, // CLTParameters clt_parameters,
fov_tiles, // Rectangle fov_tiles,
mode3d, // int mode3d,
toRGB, // boolean toRGB,
xyz_offset, // double [] stereo_offset, // offset reference camera {x,y,z}
sensor_mask, // int sensor_mask,
scenes_suffix, // String suffix,
selected_disparity, // double [] ref_disparity,
quadCLTs, // QuadCLT [] quadCLTs,
debugLevel); // int debugLevel);
if (toRGB ? save_mapped_color: save_mapped_mono) {
quadCLTs[ref_index].saveImagePlusInModelDirectory(
null, // "GPU-SHIFTED-D"+clt_parameters.disparity, // String suffix,
imp_scenes_pair[nstereo]); // imp_scenes); // ImagePlus imp)
}
// Save as AVI
if (toRGB ? gen_avi_color: gen_avi_mono) {
if (toRGB ? annotate_color: annotate_mono) {
if (!toRGB) {
// If it is mono, first convert to color
if (mono_fixed && um_mono) {
// imp_scenes.getProcessor().setMinAndMax(-mono_range/2, mono_range/2);
imp_scenes_pair[nstereo].getProcessor().setMinAndMax(-mono_range/2, mono_range/2);
}
// ImageConverter imageConverter = new ImageConverter(imp_scenes);
ImageConverter imageConverter = new ImageConverter(imp_scenes_pair[nstereo]);
imageConverter.convertToRGB(); // Did it convert imp_scenes ?
}
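// Sketch of the fixed-range conversion above (assuming mono_range = 500, with mono_fixed
// and um_mono set): setMinAndMax(-250, 250) pins the display range before convertToRGB(),
// so -250 maps to 0, 0 to about 128 and +250 to 255 in every frame, keeping brightness
// consistent across the sequence instead of auto-scaling each image.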
final Color fcolor = toRGB ? annotate_color_color: annotate_color_mono;
final ImageStack fstack_scenes = imp_scenes_pair[nstereo].getImageStack();
final int width = imp_scenes_pair[nstereo].getWidth();
final int height = imp_scenes_pair[nstereo].getHeight();
final int posX= width - 119; // 521;
final int posY= height + 1; // 513;
final Font font = new Font("Monospaced", Font.PLAIN, 12);
final int nSlices = fstack_scenes.getSize();
final Thread[] threads = ImageDtt.newThreadArray(QuadCLT.THREADS_MAX);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nSlice = ai.getAndIncrement(); nSlice < nSlices; nSlice = ai.getAndIncrement()) {
String scene_title = fstack_scenes.getSliceLabel(nSlice+1);
ImageProcessor ip = fstack_scenes.getProcessor(nSlice+1);
ip.setColor(fcolor); // Color.BLUE);
ip.setFont(font);
if (toRGB) {
ip.drawString(scene_title, posX, posY,Color.BLACK);
} else {
ip.drawString(scene_title, posX, posY);
}
}
}
};
}
ImageDtt.startAndJoin(threads);
}
if (!run_ffmpeg) {
break video; // webm not requested
if (combine_left_right && (nstereo == 0)) {
continue;
}
String webm_path = avi_path.substring(0, avi_path.length()-4)+video_ext;
// added -y so ffmpeg does not ask "overwrite y/n?"
String shellCommand = String.format("ffmpeg -y -i %s -c %s %s %s",
avi_path, video_codec, video_extra, webm_path);
Process p = null;
try {
p = Runtime.getRuntime().exec(shellCommand);
} catch (IOException e) {
System.out.println("Failed shell command: \""+shellCommand+"\"");
if (combine_left_right) { // combine pairs multi-threaded
// stack_scenes = new ImageStack(imp_scene.getWidth(),imp_scene.getHeight());
final int left_width = imp_scenes_pair[0].getWidth();
final int right_width = imp_scenes_pair[1].getWidth();
final int stereo_width = left_width + right_width+stereo_gap;
final int stereo_height = imp_scenes_pair[0].getHeight();
final ImageStack stereo_stack = new ImageStack(stereo_width, stereo_height);
final int nSlices = imp_scenes_pair[0].getStack().getSize();
for (int i = 0; i < nSlices; i++) {
stereo_stack.addSlice(
imp_scenes_pair[0].getStack().getSliceLabel(i+1),
new int[stereo_width * stereo_height]);
}
final Thread[] threads = ImageDtt.newThreadArray(QuadCLT.THREADS_MAX);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nSlice = ai.getAndIncrement(); nSlice < nSlices; nSlice = ai.getAndIncrement()) {
int[] pixels_stereo = (int[]) stereo_stack.getPixels(nSlice+1);
int[] pixels_left = (int[]) imp_scenes_pair[0].getStack().getPixels(nSlice+1);
int[] pixels_right = (int[]) imp_scenes_pair[1].getStack().getPixels(nSlice+1);
for (int row = 0; row < stereo_height; row++) {
System.arraycopy(
pixels_left,
left_width * row,
pixels_stereo,
stereo_width * row,
left_width);
System.arraycopy(
pixels_right,
right_width * row,
pixels_stereo,
stereo_width * row + left_width + stereo_gap,
right_width);
}
}
}
};
}
ImageDtt.startAndJoin(threads);
// convert stereo_stack to imp_scenes_pair[1], keeping calibration and fps?
imp_scenes_pair[1].setStack(stereo_stack);
String title = imp_scenes_pair[1].getTitle();
imp_scenes_pair[1].setTitle(title.replace("-RIGHT","-STEREO"));
}
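// Layout of each merged slice (stereo_width = left_width + right_width + stereo_gap):
// per row, columns [0, left_width) hold the left frame, the next stereo_gap columns keep
// the int[] default of 0 (rendered as black), and columns [left_width + stereo_gap,
// stereo_width) hold the right frame, matching the two System.arraycopy() calls above.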
if (p != null) {
p.waitFor();
String avi_path=null;
video:
{
try {
avi_path=quadCLTs[ref_index].saveAVIInModelDirectory(
null, // "GPU-SHIFTED-D"+clt_parameters.disparity, // String suffix,
mode_avi, // int avi_mode,
avi_JPEG_quality, // int avi_JPEG_quality,
video_fps, // double fps,
imp_scenes_pair[nstereo]); // ImagePlus imp)
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
break video;
}
// Convert with ffmpeg?
if (avi_path == null) {
break video;
}
if (!run_ffmpeg) {
break video; // webm not requested
}
String webm_path = avi_path.substring(0, avi_path.length()-4)+video_ext;
// added -y so ffmpeg does not ask "overwrite y/n?"
String shellCommand = String.format("ffmpeg -y -i %s -c %s %s %s",
avi_path, video_codec, video_extra, webm_path);
Process p = null;
try {
p = Runtime.getRuntime().exec(shellCommand);
} catch (IOException e) {
System.out.println("Failed shell command: \""+shellCommand+"\"");
}
if (p != null) {
p.waitFor();
}
System.out.println("Ran shell command: \""+shellCommand+"\"");
// Check if webm file exists
if (!(new File(webm_path)).exists()) {
System.out.println("Failed to create : \""+webm_path+"\"");
break video;
}
if (remove_avi) {
(new File(avi_path)).delete();
System.out.println("Deleted AVI video file: \""+avi_path+"\"");
}
}
System.out.println("Ran shell command: \""+shellCommand+"\"");
// Check if webm file exists
if (!(new File(webm_path)).exists()) {
System.out.println("Failed to create : \""+webm_path+"\"");
break video;
}
if (remove_avi) {
(new File(avi_path)).delete();
System.out.println("Deleted AVI video file: \""+avi_path+"\"");
}
//ffmpeg -i 1654629772_573400-SEQ-FG-COLOR.avi -c vp8 -b:v 0 -crf 40 1654629772_573400-SEQ-FG-COLOR-VP8.webm
}
// String video_extra = clt_parameters.imp.video_extra;
}
if (toRGB ? show_mapped_color: show_mapped_mono) {
imp_scenes.show();
}
}
if (toRGB ? show_mapped_color: show_mapped_mono) {
imp_scenes_pair[nstereo].show();
}
} // for (int nstereo = 0; nstereo < num_stereo; nstereo++)
} // for (int col_mode = 0; col_mode<2; col_mode++) {
}
if (export_images) {
if (combo_dsn_final == null) {
@@ -5105,13 +5191,15 @@ public class OpticalFlow {
Rectangle fov_tiles,
int mode3d,
boolean toRGB,
double [] stereo_xyz, // offset reference camera {x,y,z}
int sensor_mask,
String suffix_in,
double [] ref_disparity,
QuadCLT [] quadCLTs,
int debugLevel) {
double [] stereo_atr = ZERO3; // maybe later play with rotated camera
// video
/*
boolean gen_avi_color = clt_parameters.imp.gen_avi_color;
boolean gen_avi_mono = clt_parameters.imp.gen_avi_mono;
double video_fps = clt_parameters.imp.video_fps;
@@ -5120,6 +5208,7 @@ public class OpticalFlow {
String video_ext = clt_parameters.imp.video_ext;
String video_codec = clt_parameters.imp.video_codec;
boolean remove_avi = clt_parameters.imp.remove_avi;
*/
boolean um_mono = clt_parameters.imp.um_mono;
double um_sigma = clt_parameters.imp.um_sigma;
double um_weight = clt_parameters.imp.um_weight;
@@ -5159,6 +5248,15 @@ public class OpticalFlow {
scene_ers_xyz_dt, // double [] ers_xyz_dt,
scene_ers_atr_dt); // double [] ers_atr_dt)(ers_scene_original_xyz_dt);
}
if (stereo_xyz != null) { // offset all, including reference scene
double [][] combo_xyzatr = ErsCorrection.combineXYZATR(
stereo_xyz, // double [] reference_xyz,
stereo_atr, // double [] reference_atr,
scene_xyz, // double [] scene_xyz,
scene_atr); // double [] scene_atr)
scene_xyz = combo_xyzatr[0];
scene_atr = combo_xyzatr[1];
}
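// With a non-null stereo_xyz the left/right displacement is composed with every scene pose,
// reference scene included, so the whole sequence is rendered from the shifted viewpoint.
// Illustrative case: for the reference scene itself (zero scene_xyz/scene_atr) the combined
// pose is just the stereo offset, which is why transformToScenePxPyD() now guards its
// identity shortcut with ref_is_identity.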
int sm = merge_all? -1: sensor_mask;
ImagePlus imp_scene = QuadCLT.renderGPUFromDSI(
sm, // final int sensor_mask,
@@ -5166,6 +5264,7 @@ public class OpticalFlow {
fov_tiles, // testr, // null, // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
clt_parameters, // CLTParameters clt_parameters,
ref_disparity, // double [] disparity_ref,
// not used, just as null/not null now
scene_xyz, // final double [] scene_xyz, // camera center in world coordinates
scene_atr, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[nscene], // final QuadCLT scene,
@@ -2043,6 +2043,8 @@ public class QuadCLT extends QuadCLTCPU {
final Rectangle full_woi_in, // show larger than sensor WOI in tiles (or null)
CLTParameters clt_parameters,
double [] disparity_ref,
// not used, just as null/not null now. All offsets are already in scene_xyz, scene_atr (including ref)
// double [] stereo_offset, // offset reference camera {x,y,z} or null
final double [] scene_xyz, // camera center in world coordinates
final double [] scene_atr, // camera orientation relative to world frame
final QuadCLT scene,
@@ -2052,7 +2054,7 @@ public class QuadCLT extends QuadCLTCPU {
int threadsMax,
final int debugLevel){
boolean show_nan = toRGB? clt_parameters.imp.show_color_nan : clt_parameters.imp.show_mono_nan;
double [][] pXpYD =OpticalFlow.transformToScenePxPyD(
double [][] pXpYD =OpticalFlow.transformToScenePxPyD( // now should work with offset ref_scene
full_woi_in, // final Rectangle [] extra_woi, // show larger than sensor WOI (or null)
disparity_ref, // final double [] disparity_ref, // invalid tiles - NaN in disparity
scene_xyz, // final double [] scene_xyz, // camera center in world coordinates
@@ -2065,7 +2067,7 @@ public class QuadCLT extends QuadCLTCPU {
rendered_width = full_woi_in.width * GPUTileProcessor.DTT_SIZE;
}
//scene_QuadClt.getTileProcessor().getTileSize();
TpTask[] tp_tasks_ref = GpuQuad.setInterTasks( // inter?
TpTask[] tp_tasks_ref = GpuQuad.setInterTasks( // "true" reference, with stereo actual reference will be offset
scene.getNumSensors(),
rendered_width, // should match output size, pXpYD.length
!scene.hasGPU(), // final boolean calcPortsCoordinatesAndDerivatives, // GPU can calculate them centreXY