Commit ee4732b5 authored by Andrey Filippov

Before fixing renderGPUFromDSI with offset viewpoint

parent 90aa5914
......@@ -6818,7 +6818,7 @@ public class Eyesis_Correction implements PlugIn, ActionListener {
CLT_PARAMETERS.setRGBParameters(RGB_PARAMETERS);
try {
TWO_QUAD_CLT.buildSeriesTQ(
TwoQuadCLT.buildSeriesTQ(
quadCLT, // QUAD_CLT, // QuadCLT quadCLT_main,
-1, // int ref_index,
0, // int ref_step,
......
......@@ -317,11 +317,13 @@ public class ErsCorrection extends GeometryCorrection {
public void setPropertiesScenes(String prefix, Properties properties){
String [] timestamps = getScenes();
for (String k : timestamps) {
String [] s_scenes = getScene(k).toStrings();
properties.setProperty(prefix+SCENES_PREFIX+"_"+k, s_scenes[0]);
properties.setProperty(prefix+SCENES_PREFIX+"_"+k+"_dt", s_scenes[1]);
properties.setProperty(prefix+SCENES_PREFIX+"_"+k+"_d2t", s_scenes[2]);
// properties.setProperty(prefix+SCENES_PREFIX+"_"+k, getScene(k).toString());
if (getScene(k) != null) {
String [] s_scenes = getScene(k).toStrings(); // null pointer
properties.setProperty(prefix+SCENES_PREFIX+"_"+k, s_scenes[0]);
properties.setProperty(prefix+SCENES_PREFIX+"_"+k+"_dt", s_scenes[1]);
properties.setProperty(prefix+SCENES_PREFIX+"_"+k+"_d2t", s_scenes[2]);
// properties.setProperty(prefix+SCENES_PREFIX+"_"+k, getScene(k).toString());
}
}
}
......
......@@ -27,6 +27,7 @@ package com.elphel.imagej.tileprocessor;
import java.awt.Color;
import java.io.IOException;
import java.util.Properties;
import java.util.StringTokenizer;
import com.elphel.imagej.common.GenericJTabbedDialog;
......@@ -57,9 +58,15 @@ public class IntersceneMatchParameters {
public boolean show_color_nan = true; // use NAN background for color images (sharp, but distinct black)
public boolean show_mono_nan = false; // use NAN background for monochrome images (sharp, but distinct black)
// public double [] stereo_bases = {0.0, 200.0, 500.0, 1000.0};
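// each stereo_views row is {base, up, back} in mm: stereo baseline, virtual viewpoint height above the camera, and distance behind it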
public double [][] stereo_views = { // base, up, back
{ 0.0, 0.0, 0.0},
{ 200.0, 0.0, 0.0},
{ 500.0, 0.0, 2000.0},
{1000.0, 500.0, 3000.0}};
public double [] stereo_bases = {0.0, 200.0, 500.0, 1000.0};
public boolean [] generate_stereo_var = new boolean[stereo_bases.length];
// public boolean [] generate_stereo_var = new boolean[stereo_bases.length];
public boolean [] generate_stereo_var = new boolean[stereo_views.length];
// Other parameters
public int min_num_scenes = 10; // abandon series if there are less than this number of scenes in it
......@@ -89,24 +96,30 @@ public class IntersceneMatchParameters {
public boolean gen_avi_mono = true; // will use save_mapped_mono
public double video_fps = 15; // 4x slowmo
public double sensor_fps = 60; // sensor fps
public int mode_avi = 0; // 0 - raw, 1 - JPEG, 2 - PNG
public int mode_avi = 0; // 0 - raw, 1 - JPEG, 2 - PNG
public int avi_JPEG_quality = 90;
public boolean run_ffmpeg = true; // only after AVI
public String video_ext = ".webm";
public String video_codec = "vp8";
// public String video_extra = "-b:v 0 -crf 40"; // extra FFMPEG parameters
public int video_crf = 40; // lower - better, larger file size
public int video_crf = 40; // lower - better, larger file size
public boolean remove_avi = true; // remove avi after conversion to webm
public String video_codec_combo = "vp8"; // applies when combining videos
public int video_crf_combo = 40; // lower - better, larger file size applies when combining videos
public boolean um_mono = true; // applies to both TIFF and AVI
public double um_sigma = 10;
public double um_sigma = 10;
public double um_weight = 0.97; //
public boolean mono_fixed = true; // normalize to fixed range when converting to 8 bits
public double mono_range = 500.0; // monochrome full-scale range (+/- half)
public boolean anaglyth_en = true; // applies to both TIFF and AVI
public static Color anaglyph_left_default = new Color (255, 0, 0); // red
public static Color anaglyph_right_default =new Color (0, 255, 255); // cyan
public Color anaglyph_left = anaglyph_left_default;
public Color anaglyph_right = anaglyph_right_default; // cyan
public boolean annotate_color = true; // annotate pseudo-color video frames with timestamps
public boolean annotate_mono = true; // annotate monochrome video frames with timestamps
public Color annotate_color_color = new Color( 255, 255, 255); // greenish over "fire"
// public Color annotate_color_mono = new Color( 255, 180, 100); // reddish over grey
public Color annotate_color_mono = new Color( 255, 255, 255); // reddish over grey
public Color annotate_color_color =new Color( 255, 255, 255); // greenish over "fire"
public Color annotate_color_mono = new Color( 255, 180, 50); // reddish over grey
public boolean annotate_transparent_mono = false; // // black if not transparent
......@@ -235,8 +248,9 @@ public class IntersceneMatchParameters {
public boolean showMovementDetection(){return (debug_level>0) && mov_show;}
public int movDebugLevel() {return (debug_level > -1) ? mov_debug_level : 0;}
public IntersceneMatchParameters() {
}
......@@ -283,7 +297,7 @@ public class IntersceneMatchParameters {
"Correct in 3D scene images (from all 16 sensors) matching reference (last in sequence) scene with background (BG) priority.");
gd.addCheckbox ("Generate binocular stereo pairs", this.generate_stereo,
"Generate stereo-pairs for 3D-corrected videos (FG,BG). Ebables specific modes (including 0-baseline / mono).");
/*
for (int i = 0; i < stereo_bases.length; i++) {
double base = stereo_bases[i];
String title = (base == 0.0)?
......@@ -294,7 +308,27 @@ public class IntersceneMatchParameters {
"Generate "+base+"mm-baseline stereo scene sequences as Tiff and/or video.";
gd.addCheckbox (title, this.generate_stereo_var[i], tooltip);
}
*/
for (int i = 0; i < stereo_views.length; i++) {
// String stereo_view = doublesToString(stereo_views[i]);
double base = stereo_views[i][0]; // stereo_bases[i];
String ub = String.format("(%.0fmm up, %.0fmm back) ",stereo_views[i][1],stereo_views[i][2]);
if ((stereo_views[i][1]==0) && (stereo_views[i][2]==0)){
ub="";
}
String title = (base == 0.0)?
"Generate mono (single camera) scene sequences"+ub:
"Generate "+base+"mm-baseline stereo scene sequences"+ub;
String tooltip = (base == 0.0)?
"Generate mono (single camera) scene sequences "+ub+"as Tiff and/or video.":
"Generate "+base+"mm-baseline stereo scene sequences "+ub+"as Tiff and/or video.";
gd.addCheckbox (title, this.generate_stereo_var[i], tooltip);
}
gd.addMessage ("Generate/save reference (last) scene images");
gd.addCheckbox ("Export all-sensor images", this.export_images,
"Export multi-slice images: with constant disparity, with foreground disparity, and with background disparity");
......@@ -403,6 +437,7 @@ public class IntersceneMatchParameters {
"Weaker are removed unconditionally (not used now).");
gd.addMessage ("Stereo");
/*
if (stereo_bases.length > 0) {
String [] stereo_choices = new String [stereo_bases.length + 1];
stereo_choices[0] = "--none--";
......@@ -412,8 +447,20 @@ public class IntersceneMatchParameters {
gd. addChoice("Remove stereo-base", stereo_choices, stereo_choices[0],
"Remove selected stereo-base");
}
gd.addStringField("Add another baseline (mm)", "",
"Add another stereo baseline (mm).");
*/
if (stereo_views.length > 0) {
String [] stereo_choices = new String [stereo_views.length + 1];
stereo_choices[0] = "--none--";
for (int i = 0; i < stereo_views.length; i++) {
stereo_choices[i+1] = doublesToString(stereo_views[i])+" mm";
}
gd. addChoice("Remove stereo-view (base, up, back)", stereo_choices, stereo_choices[0],
"Remove selected stereo-view, consisting of streo-base, viewpoint above camera, viewpoint behing camera - all in mm");
}
gd.addStringField("Add another stereo view (baseline, above, behind)", "", 40,
"Add another stereo view by providing baseline, above camera, behind camera (mm).");
// gd.addNumericField("Stereo baseline", this.stereo_baseline, 5,7,"mm",
// "Synthetic 3D with possibly exagerrated stereo baseline");
......@@ -459,16 +506,17 @@ public class IntersceneMatchParameters {
"Converted video extension, starting with dot.");
gd.addStringField ("Video encoder", this.video_codec, 60,
"FFMPEG video encoder, such as \"VP8\" or \"VP9\".");
// gd.addStringField ("Video extra parameters", this.video_extra, 60,
// "FFMPEG video encoder additional parameters, such as, such as \"-b:v 0 -crf 40\".");
gd.addNumericField("Video CRF", this.video_crf, 0,3,"",
gd.addNumericField("Video CRF", this.video_crf, 0,3,"",
"Quality - the lower the better. 40 - OK");
gd.addCheckbox ("Remove AVI", this.remove_avi,
"Remove large AVI files after (and only) conversion with ffmpeg.");
gd.addStringField ("Video encoder for combining", this.video_codec_combo, 60,
"FFMPEG video encoder, such as \"VP8\" or \"VP9\". Applies when merging segments.");
gd.addNumericField("Video CRF for combining", this.video_crf_combo, 0,3,"",
"Quality - the lower the better. 40 - OK. Applies when merging segments.");
gd.addCheckbox ("Remove AVI", this.remove_avi,
"Remove large AVI files after conversion with ffmpeg.");
gd.addCheckbox ("Apply unsharp mask to mono", this.um_mono,
"Apply unsharp mask to monochrome image sequences/video. Applies to TIFF generatiojn too");
gd.addNumericField("Unsharp mask sigma (radius)", this.um_sigma, 5,7,"pix",
......@@ -481,6 +529,18 @@ public class IntersceneMatchParameters {
gd.addNumericField("Monochrome full range", this.mono_range, 5,7,"",
"Monochrome full range to convert to 0..255.");
gd.addCheckbox ("Generate anaglyph stereo", this.anaglyth_en,
"Apply unsharp mask to monochrome image sequences/video. Applies to TIFF generatiojn too");
{String scolor = String.format("%08x", getLongColor(this.anaglyph_left));
gd.addStringField ("Anaglyph color left",scolor, 8, "Any invalid hex number sets default red");}
{String scolor = String.format("%08x", getLongColor(this.anaglyph_right));
gd.addStringField ("Anaglyph color right",scolor, 8, "Any invalid hex number sets default cyan");}
gd.addCheckbox ("Timestamp color videos", this.annotate_color,
......@@ -490,6 +550,7 @@ public class IntersceneMatchParameters {
String scolor = (this.annotate_color_color==null)?"none":String.format("%08x", getLongColor(this.annotate_color_color));
gd.addStringField ("Timestamp color for pseudocolor frames",scolor, 8, "Any invalid hex number disables annotation");
scolor = (this.annotate_color_mono==null)?"none":String.format("%08x", getLongColor(this.annotate_color_mono));
gd.addStringField ("Timestamp color for monochrome frames",scolor, 8, "Any invalid hex number disables annotation");
gd.addCheckbox ("Transparent timestamp background (monochrome)",this.annotate_transparent_mono,
"Put monochrome timestamp over image (unchecked - use black background). Color - always black.");
......@@ -644,9 +705,14 @@ public class IntersceneMatchParameters {
this.generate_fg = gd.getNextBoolean();
this.generate_bg = gd.getNextBoolean();
this.generate_stereo = gd.getNextBoolean();
/*
for (int i = 0; i < stereo_bases.length; i++) {
this.generate_stereo_var[i] = gd.getNextBoolean();
}
*/
for (int i = 0; i < stereo_views.length; i++) {
this.generate_stereo_var[i] = gd.getNextBoolean();
}
this.export_images = gd.getNextBoolean();
this.show_images = gd.getNextBoolean();
......@@ -700,9 +766,10 @@ public class IntersceneMatchParameters {
this.weak_min_neibs = (int) gd.getNextNumber();
this.strong_strength = gd.getNextNumber();
this.weak_strength = gd.getNextNumber();
/*
if (stereo_bases.length > 0) {
int i = gd.getNextChoiceIndex();
int i = gd.getNextChoiceIndex();
if (i > 0) {
removeStereo(i-1);
}
......@@ -717,6 +784,15 @@ public class IntersceneMatchParameters {
}
}
*/
if (stereo_views.length > 0) {
int i = gd.getNextChoiceIndex();
if (i > 0) {
removeStereoView(i-1);
}
}
String s = gd.getNextString();
addStereoView(s, true);
// this.stereo_baseline = gd.getNextNumber();
this.stereo_merge = gd.getNextBoolean();
......@@ -739,14 +815,39 @@ public class IntersceneMatchParameters {
this.run_ffmpeg = gd.getNextBoolean();
this.video_ext= gd.getNextString();
this.video_codec= gd.getNextString();
// this.video_extra= gd.getNextString();
this.video_crf = (int) gd.getNextNumber();
this.remove_avi = gd.getNextBoolean();
this.video_codec_combo= gd.getNextString();
this.video_crf_combo = (int) gd.getNextNumber();
this.um_mono = gd.getNextBoolean();
this.um_sigma = gd.getNextNumber();
this.um_weight = gd.getNextNumber();
this.mono_fixed = gd.getNextBoolean();
this.mono_range = gd.getNextNumber();
this.anaglyth_en = gd.getNextBoolean();
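// read the two anaglyph colors as hex strings; any unparsable value falls back to the default red/cyan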
{
String scolor = gd.getNextString();
long lcolor = -1;
try {
lcolor = Long.parseLong(scolor,16);
this.anaglyph_left = setLongColor(lcolor);
} catch(NumberFormatException e){
this.anaglyph_left = anaglyph_left_default;
}
}
{
String scolor = gd.getNextString();
long lcolor = -1;
try {
lcolor = Long.parseLong(scolor,16);
this.anaglyph_right = setLongColor(lcolor);
} catch(NumberFormatException e){
this.anaglyph_right = anaglyph_right_default;
}
}
this.annotate_color = gd.getNextBoolean();
this.annotate_mono = gd.getNextBoolean();
{
......@@ -855,13 +956,21 @@ public class IntersceneMatchParameters {
properties.setProperty(prefix+"generate_fg", this.generate_fg+""); // boolean
properties.setProperty(prefix+"generate_bg", this.generate_bg+""); // boolean
properties.setProperty(prefix+"generate_stereo", this.generate_stereo+""); // boolean
/*
properties.setProperty(prefix+"stereo_bases_num", this.stereo_bases.length+""); // int
for (int i = 0; i < this.stereo_bases.length; i++) {
properties.setProperty(prefix+"stereo_bases_"+i, this.stereo_bases[i]+""); // double
properties.setProperty(prefix+"generate_stereo_var_"+i, this.generate_stereo_var[i]+""); // boolean
}
*/
properties.setProperty(prefix+"stereo_views_num", this.stereo_views.length+""); // int
for (int i = 0; i < this.stereo_views.length; i++) {
properties.setProperty(prefix+"stereo_views_"+i, doublesToString(this.stereo_views[i])); // String
properties.setProperty(prefix+"generate_stereo_var_"+i, this.generate_stereo_var[i]+""); // boolean
}
properties.setProperty(prefix+"export_images", this.export_images + ""); // boolean
properties.setProperty(prefix+"show_images", this.show_images + ""); // boolean
properties.setProperty(prefix+"show_images_bgfg", this.show_images_bgfg + ""); // boolean
......@@ -930,14 +1039,24 @@ public class IntersceneMatchParameters {
properties.setProperty(prefix+"run_ffmpeg", this.run_ffmpeg+""); // boolean
properties.setProperty(prefix+"video_ext", this.video_ext+""); // String
properties.setProperty(prefix+"video_codec", this.video_codec+""); // String
// properties.setProperty(prefix+"video_extra", this.video_extra+""); // String
properties.setProperty(prefix+"video_crf", this.video_crf+""); // int
properties.setProperty(prefix+"remove_avi", this.remove_avi+""); // boolean
properties.setProperty(prefix+"video_codec_combo", this.video_codec_combo+""); // String
properties.setProperty(prefix+"video_crf_combo", this.video_crf_combo+""); // int
properties.setProperty(prefix+"um_mono", this.um_mono+""); // boolean
properties.setProperty(prefix+"um_sigma", this.um_sigma+""); // double
properties.setProperty(prefix+"um_weight", this.um_weight+""); // double
properties.setProperty(prefix+"mono_fixed", this.mono_fixed+""); // boolean
properties.setProperty(prefix+"mono_range", this.mono_range+""); // double
properties.setProperty(prefix+"anaglyth_en", this.anaglyth_en+""); // boolean
properties.setProperty(prefix+"anaglyph_left", getLongColor(anaglyph_left)+""); // Color
properties.setProperty(prefix+"anaglyph_right", getLongColor(anaglyph_right)+""); // Color
properties.setProperty(prefix+"annotate_color", this.annotate_color+""); // boolean
properties.setProperty(prefix+"annotate_mono", this.annotate_mono+""); // boolean
{
......@@ -1031,6 +1150,7 @@ public class IntersceneMatchParameters {
if (properties.getProperty(prefix+"generate_bg")!=null) this.generate_bg=Boolean.parseBoolean(properties.getProperty(prefix+"generate_bg"));
if (properties.getProperty(prefix+"generate_stereo")!=null) this.generate_stereo=Boolean.parseBoolean(properties.getProperty(prefix+"generate_stereo"));
/*
if (properties.getProperty(prefix+"stereo_bases_num")!=null) {
int stereo_bases_num=Integer.parseInt(properties.getProperty(prefix+"stereo_bases_num"));
this.stereo_bases = new double[stereo_bases_num];
......@@ -1041,6 +1161,24 @@ public class IntersceneMatchParameters {
}
orderStereo();
}
*/
if (properties.getProperty(prefix+"stereo_views_num")!=null) {
int stereo_views_num=Integer.parseInt(properties.getProperty(prefix+"stereo_views_num"));
this.stereo_views = new double[stereo_views_num][];
this.generate_stereo_var = new boolean[stereo_views_num];
for (int i = 0; i < stereo_views_num; i++) {
if (properties.getProperty(prefix+"stereo_views_"+i)!=null) {
this.stereo_views[i]=StringToDoubles(properties.getProperty(prefix+"stereo_views_"+i),3);
}
if (properties.getProperty(prefix+"generate_stereo_var_"+i)!=null) {
this.generate_stereo_var[i]=Boolean.parseBoolean(properties.getProperty(prefix+"generate_stereo_var_"+i));
}
}
orderStereoViews();
}
if (properties.getProperty(prefix+"export_images")!=null) this.export_images=Boolean.parseBoolean(properties.getProperty(prefix+"export_images"));
if (properties.getProperty(prefix+"show_images")!=null) this.show_images=Boolean.parseBoolean(properties.getProperty(prefix+"show_images"));
......@@ -1109,14 +1247,32 @@ public class IntersceneMatchParameters {
if (properties.getProperty(prefix+"run_ffmpeg")!=null) this.run_ffmpeg=Boolean.parseBoolean(properties.getProperty(prefix+"run_ffmpeg"));
if (properties.getProperty(prefix+"video_ext")!=null) this.video_ext=(String) properties.getProperty(prefix+"video_ext");
if (properties.getProperty(prefix+"video_codec")!=null) this.video_codec=(String) properties.getProperty(prefix+"video_codec");
/// if (properties.getProperty(prefix+"video_extra")!=null) this.video_extra=(String) properties.getProperty(prefix+"video_extra");
if (properties.getProperty(prefix+"video_crf")!=null) this.video_crf=Integer.parseInt(properties.getProperty(prefix+"video_crf"));
if (properties.getProperty(prefix+"remove_avi")!=null) this.remove_avi=Boolean.parseBoolean(properties.getProperty(prefix+"remove_avi"));
if (properties.getProperty(prefix+"video_codec_combo")!=null) this.video_codec_combo=(String) properties.getProperty(prefix+"video_codec_combo");
if (properties.getProperty(prefix+"video_crf_combo")!=null) this.video_crf_combo=Integer.parseInt(properties.getProperty(prefix+"video_crf_combo"));
if (properties.getProperty(prefix+"um_mono")!=null) this.um_mono=Boolean.parseBoolean(properties.getProperty(prefix+"um_mono"));
if (properties.getProperty(prefix+"um_sigma")!=null) this.um_sigma=Double.parseDouble(properties.getProperty(prefix+"um_sigma"));
if (properties.getProperty(prefix+"um_weight")!=null) this.um_weight=Double.parseDouble(properties.getProperty(prefix+"um_weight"));
if (properties.getProperty(prefix+"mono_fixed")!=null) this.mono_fixed=Boolean.parseBoolean(properties.getProperty(prefix+"mono_fixed"));
if (properties.getProperty(prefix+"mono_range")!=null) this.mono_range=Double.parseDouble(properties.getProperty(prefix+"mono_range"));
if (properties.getProperty(prefix+"anaglyth_en")!=null) this.anaglyth_en=Boolean.parseBoolean(properties.getProperty(prefix+"anaglyth_en"));
if (properties.getProperty(prefix+"anaglyph_left") != null) {
try {
this.anaglyph_left = setLongColor(Long.parseLong(properties.getProperty(prefix+"anaglyph_left")));
} catch(NumberFormatException e){
this.anaglyph_left = anaglyph_left_default;
}
}
if (properties.getProperty(prefix+"anaglyph_right") != null) {
try {
this.anaglyph_right = setLongColor(Long.parseLong(properties.getProperty(prefix+"anaglyph_right")));
} catch(NumberFormatException e){
this.anaglyph_right = anaglyph_right_default;
}
}
if (properties.getProperty(prefix+"annotate_color")!=null) this.annotate_color=Boolean.parseBoolean(properties.getProperty(prefix+"annotate_color"));
if (properties.getProperty(prefix+"annotate_mono")!=null) this.annotate_mono=Boolean.parseBoolean(properties.getProperty(prefix+"annotate_mono"));
......@@ -1216,7 +1372,11 @@ public class IntersceneMatchParameters {
imp.generate_stereo = this.generate_stereo;
imp.stereo_bases = this.stereo_bases.clone();
// imp.stereo_bases = this.stereo_bases.clone();
imp.stereo_views = this.stereo_views.clone();
for (int i = 0; i < this.stereo_views.length; i++) {
imp.stereo_views[i] = this.stereo_views[i].clone();
}
imp.generate_stereo_var = this.generate_stereo_var.clone();
imp.export_images = this.export_images;
......@@ -1286,9 +1446,10 @@ public class IntersceneMatchParameters {
imp.run_ffmpeg = this. run_ffmpeg;
imp.video_ext = this. video_ext;
imp.video_codec = this. video_codec;
// imp.video_extra = this. video_extra;
imp.video_crf = this. video_crf;
imp.remove_avi = this. remove_avi;
imp.video_codec_combo = this. video_codec_combo;
imp.video_crf_combo = this. video_crf_combo;
imp.um_mono = this. um_mono;
imp.um_sigma = this. um_sigma;
......@@ -1296,6 +1457,10 @@ public class IntersceneMatchParameters {
imp.mono_fixed = this. mono_fixed;
imp.mono_range = this. mono_range;
imp.anaglyth_en = this. anaglyth_en;
imp.anaglyph_left = this. anaglyph_left;
imp.anaglyph_right = this. anaglyph_right;
imp.annotate_color = this. annotate_color;
imp.annotate_mono = this. annotate_mono;
imp.annotate_color_color = this. annotate_color_color;
......@@ -1371,7 +1536,8 @@ public class IntersceneMatchParameters {
return new Color((int) lcolor, true);
}
}
/*
public void orderStereo(){
boolean ordered;
do {
......@@ -1390,7 +1556,6 @@ public class IntersceneMatchParameters {
} while (!ordered);
}
public void addStereo(double base, boolean en) {
double [] bases = new double [stereo_bases.length + 1];
boolean [] ens = new boolean [stereo_bases.length + 1];
......@@ -1402,7 +1567,6 @@ public class IntersceneMatchParameters {
generate_stereo_var = ens;
orderStereo();
}
public void removeStereo(int indx) {
if ((indx >=0) && (indx <stereo_bases.length)) {
double [] bases = new double [stereo_bases.length - 1];
......@@ -1419,7 +1583,128 @@ public class IntersceneMatchParameters {
generate_stereo_var = ens;
}
}
*/
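// Bubble-sort stereo_views by (base, up, back); exact duplicates are merged, OR-ing their enable flags.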
public void orderStereoViews(){
boolean ordered;
do {
ordered=true;
for (int i = 0; i < (stereo_views.length - 1); i++) {
if (stereo_views[i+1][0] > stereo_views[i][0]) {
continue;
}
if ( (stereo_views[i+1][0] == stereo_views[i][0]) &&
(stereo_views[i+1][1] > stereo_views[i][1])) {
continue;
}
if ( (stereo_views[i+1][0] == stereo_views[i][0]) &&
(stereo_views[i+1][1] == stereo_views[i][1]) &&
(stereo_views[i+1][2] > stereo_views[i][2])) {
continue;
}
if ( (stereo_views[i+1][0] == stereo_views[i][0]) &&
(stereo_views[i+1][1] == stereo_views[i][1]) &&
(stereo_views[i+1][2] == stereo_views[i][2])) {
// all same values - remove extra
generate_stereo_var[i] |= generate_stereo_var[i+1];
for (int j = i+1; j < (stereo_views.length - 1); j++) {
generate_stereo_var[j] = generate_stereo_var[j+1];
stereo_views[j] = stereo_views[j + 1];
}
ordered = false;
break; // next while
}
boolean en = generate_stereo_var[i+1];
generate_stereo_var[i+1] = generate_stereo_var[i];
generate_stereo_var[i] = en;
double [] view = stereo_views[i+1];
stereo_views[i+1] = stereo_views[i];
stereo_views[i] = view;
ordered = false;
}
} while (!ordered);
return;
}
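// Parse a comma/whitespace-separated "base, up, back" string (mm) and add it as a new view (ignored if empty).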
public void addStereoView(String stereo_view_string, boolean en) {
double[] stereo_view = StringToDoubles(stereo_view_string,3);
if (stereo_view != null) {
addStereoView(stereo_view, en);
}
}
public void addStereoView(double[] stereo_view, boolean en) {
double [][] views = new double [stereo_views.length + 1][];
boolean [] ens = new boolean [stereo_views.length + 1];
views[0] = stereo_view;
ens[0] = en;
System.arraycopy(stereo_views, 0, views, 1, stereo_views.length);
System.arraycopy(generate_stereo_var, 0, ens, 1, stereo_views.length);
stereo_views = views;
generate_stereo_var = ens;
orderStereoViews();
}
public void removeStereoView(int indx) {
if ((indx >=0) && (indx <stereo_views.length)) {
double [][] views = new double [stereo_views.length - 1][];
boolean [] ens = new boolean [stereo_views.length - 1];
if (indx > 0) {
System.arraycopy(stereo_views, 0, views, 0, indx);
System.arraycopy(generate_stereo_var, 0, ens, 0, indx);
}
if (indx < (stereo_views.length - 1)) {
System.arraycopy(stereo_views, indx+1, views, indx, stereo_views.length - indx - 1);
System.arraycopy(generate_stereo_var, indx+1, ens, indx, stereo_views.length - indx - 1);
}
stereo_views = views;
generate_stereo_var = ens;
}
}
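// Format a double[] as comma-separated values ("%.0f" by default).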
public static String doublesToString(double [] data) {
return doublesToString(data, null);
}
public static String doublesToString(double [] data, String fmt) {
if ((fmt == null) || (fmt.trim().length()==0)) {
fmt = "%.0f";
}
String s = "";
for (int i = 0; i < data.length; i++) {
s+=String.format(fmt,data[i]);
if (i < (data.length - 1)) {
s+= ", ";
}
}
return s;
}
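// Parse up to len doubles from a comma/whitespace-separated string; unparsable tokens become 0.0, null is returned for an empty string.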
public static double [] StringToDoubles(String s, int len) {
StringTokenizer st = new StringTokenizer(s, " \t\n\r\f,");
if (st.countTokens() == 0) {
return null;
}
if (len <= 0) {
len = st.countTokens();
}
double [] data = new double [len];
int i = 0;
while (st.hasMoreTokens() && (i < len)) {
double d = 0;
try {
d = Double.parseDouble(st.nextToken());
} catch(NumberFormatException e){
d = 0;
}
data[i++] = d;
}
return data;
}
}
......@@ -2780,9 +2780,9 @@ public class OpticalFlow {
}
}
}
final boolean ref_is_identity =
(scene_xyz[0]==0.0) && (scene_xyz[1]==0.0) && (scene_xyz[1]==0.0) &&
(scene_atr[0]==0.0) && (scene_atr[1]==0.0) && (scene_atr[1]==0.0);
final boolean ref_is_identity = false;
/// (scene_xyz[0]==0.0) && (scene_xyz[1]==0.0) && (scene_xyz[2]==0.0) &&
/// (scene_atr[0]==0.0) && (scene_atr[1]==0.0) && (scene_atr[2]==0.0);
final double [] disparity_ref = dref;
// final int tilesX_ref = ref_w;
// final int tilesY_ref = ref_h;
......@@ -3963,7 +3963,7 @@ public class OpticalFlow {
boolean batch_mode,
QuadCLT quadCLT_main, // tiles should be set
int ref_index, // -1 - last
int ref_step,
// int start_index,
CLTParameters clt_parameters,
EyesisCorrectionParameters.DebayerParameters debayerParameters,
ColorProcParameters colorProcParameters,
......@@ -3973,7 +3973,8 @@ public class OpticalFlow {
Properties properties,
boolean reset_from_extrinsics,
String [][] videos, // null or String[1][] list of generated avi or webm paths
int [][] stereo_widths, // null or int[1][] matching videos -
int [][] stereo_widths, // null or int[1][] matching videos -
int [] start_ref_pointers, // [0] - earliest valid scene, [1] ref_index
// each element is 0 for non-stereo and full width for stereo
final int threadsMax, // maximal number of threads to launch
final boolean updateStatus,
......@@ -4032,11 +4033,15 @@ public class OpticalFlow {
clt_parameters.imp.generate_bg};
boolean generate_stereo = clt_parameters.imp.generate_stereo;
double [] stereo_bases = clt_parameters.imp.stereo_bases; // {0.0, 200.0, 500.0, 1000.0};
// double [] stereo_bases = clt_parameters.imp.stereo_bases; // {0.0, 200.0, 500.0, 1000.0};
double [][] stereo_views = clt_parameters.imp.stereo_views; // {0.0, 200.0, 500.0, 1000.0};
boolean [] generate_stereo_var = clt_parameters.imp.generate_stereo_var;
boolean stereo_merge = clt_parameters.imp.stereo_merge;
boolean anaglyth_en = clt_parameters.imp.anaglyth_en;
final Color anaglyph_left = clt_parameters.imp.anaglyph_left;
final Color anaglyph_right = clt_parameters.imp.anaglyph_right;
int stereo_gap = clt_parameters.imp.stereo_gap;
// double stereo_intereye = clt_parameters.imp.stereo_intereye;
// double stereo_phone_width = clt_parameters.imp.stereo_phone_width;
......@@ -4061,7 +4066,7 @@ public class OpticalFlow {
boolean mono_fixed = clt_parameters.imp.mono_fixed;
double mono_range = clt_parameters.imp.mono_range;
boolean reuse_video = clt_parameters.imp.reuse_video &&
boolean reuse_video = clt_parameters.imp.reuse_video &&
(gen_seq_mono_color[0] || gen_seq_mono_color[1]); // generate sequences - Tiff and/or video
......@@ -4098,11 +4103,21 @@ public class OpticalFlow {
System.out.println("buildSeriesTQ(): No files to process (of "+sourceFiles0.length+")");
return null;
}
QuadCLT.SetChannels [] set_channels=quadCLT_main.setChannels(debugLevel);
QuadCLT [] quadCLTs = new QuadCLT [set_channels.length];
// set_channels will include all 99 scenes even though quadCLTs.length only goes up to ref_index
QuadCLT.SetChannels [] set_channels=quadCLT_main.setChannels(debugLevel);
if (ref_index < 0) {
ref_index += quadCLTs.length;
ref_index += set_channels.length;
}
if (start_ref_pointers != null) {
start_ref_pointers[0] = 0;
start_ref_pointers[1] = ref_index;
}
QuadCLT [] quadCLTs = new QuadCLT [ref_index+1]; // [set_channels.length];
//start_index
double [][][] scenes_xyzatr = new double [quadCLTs.length][][]; // previous scene relative to the next one
scenes_xyzatr[ref_index] = new double[2][3]; // all zeros
// See if build_ref_dsi is needed
......@@ -4315,6 +4330,9 @@ public class OpticalFlow {
if ((ref_index - earliest_scene + 1) < min_num_scenes) {
System.out.println("Total number of useful scenes = "+(ref_index - earliest_scene + 1)+
" < "+min_num_scenes+". Scrapping this series.");
if (start_ref_pointers != null) {
start_ref_pointers[0] = earliest_scene;
}
return null;
}
if (earliest_scene > 0) {
......@@ -4326,7 +4344,7 @@ public class OpticalFlow {
}
quadCLTs[ref_index].set_orient(1); // first orientation
quadCLTs[ref_index].set_accum(0); // reset accumulations ("build_interscene") number
quadCLTs[ref_index].saveInterProperties( // save properties for interscene processing (extrinsics, ers, ...)
quadCLTs[ref_index].saveInterProperties( // save properties for interscene processing (extrinsics, ers, ...) // null pointer
null, // String path, // full name with extension or w/o path to use x3d directory
debugLevel+1);
} else {// if (build_orientations) {
......@@ -4341,11 +4359,11 @@ public class OpticalFlow {
}
}
}
// just in case orientations were calculated before:
earliest_scene = getEarliestScene(quadCLTs);
double [][] combo_dsn_final = null;
while (!reuse_video && ((quadCLTs[ref_index].getNumOrient() < min_num_orient) || (quadCLTs[ref_index].getNumAccum() < min_num_interscene))) {
// if (build_interscene) {
// start with interscene accumulations if number of accumulations is less than number of performed
// orientations or no orientations is needed
if ((quadCLTs[ref_index].getNumAccum() < min_num_interscene) &&
((quadCLTs[ref_index].getNumAccum() < quadCLTs[ref_index].getNumOrient())||
(quadCLTs[ref_index].getNumOrient() >= min_num_orient))) {
......@@ -4389,6 +4407,9 @@ public class OpticalFlow {
if ((ref_index - earliest_scene + 1) < min_num_scenes) {
System.out.println("After reAdjustPairsLMAInterscene() total number of useful scenes = "+(ref_index - earliest_scene + 1)+
" < "+min_num_scenes+". Scrapping this series.");
if (start_ref_pointers != null) {
start_ref_pointers[0] = earliest_scene;
}
return null;
}
if (earliest_scene > 0) {
......@@ -4404,21 +4425,6 @@ public class OpticalFlow {
if (test_ers) { // only debug feature
test_ers0 = quadCLTs.length -1; // make it always == reference !
//Already done in any case
/*
if (!force_initial_orientations && !build_interscene && !readjust_orient) {
for (int nscene = 0; nscene < (quadCLTs.length -1); nscene++) {
if ((Math.abs(nscene - test_ers0) <= 1) || (Math.abs(nscene - test_ers1) <= 1)) {
quadCLTs[nscene] = (QuadCLT) quadCLT_main.spawnNoModelQuadCLT( // restores image data
set_channels[nscene].set_name,
clt_parameters,
colorProcParameters, //
threadsMax,
debugLevel-2);
}
}
}
*/
testERS(
clt_parameters, // CLTParameters clt_parameters,
......@@ -4432,22 +4438,9 @@ public class OpticalFlow {
return quadCLTs[ref_index].getX3dTopDirectory();
}
// generates 3-dmodes, colors, stereos, tiffs/videos
// generates 3-d modes, colors, stereos, tiffs/videos
if (generate_mapped || reuse_video) {
// already done in any case
/*
if (!force_initial_orientations && !build_interscene) {
for (int scene_index = ref_index - 1; scene_index >= 0 ; scene_index--) {
quadCLTs[scene_index] = (QuadCLT) quadCLT_main.spawnNoModelQuadCLT( // restores image data
set_channels[scene_index].set_name,
clt_parameters,
colorProcParameters, //
threadsMax,
debugLevel-2);
}
}
*/
int tilesX = quadCLTs[ref_index].getTileProcessor().getTilesX();
int tilesY = quadCLTs[ref_index].getTileProcessor().getTilesY();
double [] disparity_fg = null;
......@@ -4560,14 +4553,21 @@ public class OpticalFlow {
}
boolean is_3d = mode3d > 0;
boolean gen_stereo = is_3d && generate_stereo;
double [] baselines = (gen_stereo)? stereo_bases : new double[] {0.0};
for (int ibase = 0; ibase < baselines.length; ibase++) if (!gen_stereo || generate_stereo_var[ibase]) {
double stereo_baseline = gen_stereo? stereo_bases[ibase] : 0.0;
boolean is_stereo = gen_stereo && stereo_baseline > 0;
double stereo_baseline_meters = 0.001 * stereo_baseline;
// col_mode: 0 - mono, 1 - color
// double [] baselines = (gen_stereo)? stereo_bases : new double[] {0.0};
double [][] views = (gen_stereo)? stereo_views : new double [][] {{0.0,0.0,0.0}};
// for (int ibase = 0; ibase < baselines.length; ibase++) if (!gen_stereo || generate_stereo_var[ibase]) {
for (int ibase = 0; ibase < views.length; ibase++) if (!gen_stereo || generate_stereo_var[ibase]) {
// double stereo_baseline = gen_stereo? stereo_bases[ibase] : 0.0;
double stereo_baseline = gen_stereo? views[ibase][0] : 0.0;
boolean is_stereo = gen_stereo && stereo_baseline > 0;
double stereo_baseline_meters = 0.001 * stereo_baseline;
double view_height_meters = 0.001 * views[ibase][1];
double view_back_meters = 0.001 * views[ibase][2];
// double stereo_back = 3.0; // 0; // -10.0; // meters
// col_mode: 0 - mono, 1 - color
for (int col_mode = 0; col_mode < 2; col_mode++) if (gen_seq_mono_color[col_mode]){ // skip if not needed
double[] selected_disparity = (mode3d > 1)?disparity_bg:((mode3d > 0)?disparity_fg: disparity_raw);
double[] selected_disparity = (mode3d > 1)?disparity_bg:((mode3d > 0)?disparity_fg: disparity_raw);
final boolean toRGB = col_mode > 0;
String scenes_suffix = quadCLTs[quadCLTs.length-1].getImageName()+
"-SEQ-" + IntersceneMatchParameters.MODES3D[mode3d+1] + "-"+(toRGB?"COLOR":"MONO");
......@@ -4575,18 +4575,25 @@ public class OpticalFlow {
scenes_suffix+=String.format("-UM%.1f_%.2f",um_sigma,um_weight);
}
int num_stereo = (is_stereo && (mode3d > 0))? 2:1; // only for 3D views
boolean combine_left_right = (num_stereo > 1) && stereo_merge;
boolean combine_left_right = (num_stereo > 1) && (stereo_merge || (anaglyth_en && !toRGB));
ImagePlus [] imp_scenes_pair = new ImagePlus[num_stereo];
String scenes_suffix_pair = scenes_suffix;
for (int nstereo = 0; nstereo < num_stereo; nstereo++) {
double [] xyz_offset = {
-stereo_baseline_meters * (nstereo - 0.5) * (num_stereo - 1), // x offset
0.0, // Y offset
0.0}; // Z offset
-view_height_meters, // Y offset
-view_back_meters}; // Z offset
if (num_stereo > 1) {
scenes_suffix = scenes_suffix_pair + ((nstereo > 0)?"-RIGHT":"-LEFT"); // check if opposite
scenes_suffix += stereo_baseline;
scenes_suffix += "-B"+views[ibase][0];
}
if (views[ibase][1] != 0) {
scenes_suffix += "-Y"+views[ibase][1];
}
if (views[ibase][2] != 0) {
scenes_suffix += "-Z"+views[ibase][2];
}
if (generate_mapped) {
imp_scenes_pair[nstereo]= renderSceneSequence(
clt_parameters, // CLTParameters clt_parameters,
......@@ -4611,11 +4618,11 @@ public class OpticalFlow {
FloatProcessor fp = new FloatProcessor(
fov_tiles.width*quadCLTs[ref_index].getTileProcessor().getTileSize(),
fov_tiles.height*quadCLTs[ref_index].getTileProcessor().getTileSize());
boolean merge_all = clt_parameters.imp.merge_all;
if (mode3d < 1) {
merge_all = false;
}
boolean merge_all = clt_parameters.imp.merge_all;
if (mode3d < 1) {
merge_all = false;
}
imp_scenes_pair[nstereo]=new ImagePlus(scenes_suffix+((mode3d > 0)?(merge_all?"-MERGED":"-SINGLE"):""), fp);
}
// Save as AVI
......@@ -4662,117 +4669,186 @@ public class OpticalFlow {
if (combine_left_right && (nstereo == 0)) {
continue;
}
if (combine_left_right) { // combine pairs multi-threaded
// stack_scenes = new ImageStack(imp_scene.getWidth(),imp_scene.getHeight());
final int left_width = imp_scenes_pair[0].getWidth();
final int right_width = imp_scenes_pair[1].getWidth();
final int stereo_width = left_width + right_width+stereo_gap;
final int stereo_height = imp_scenes_pair[0].getHeight();
final ImageStack stereo_stack = new ImageStack(stereo_width, stereo_height);
final int nSlices = imp_scenes_pair[0].getStack().getSize();
for (int i = 0; i < nSlices; i++) {
stereo_stack.addSlice(
imp_scenes_pair[0].getStack().getSliceLabel(i+1),
new int[stereo_width * stereo_height]);
}
if (generate_mapped) {
final Thread[] threads = ImageDtt.newThreadArray(QuadCLT.THREADS_MAX);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nSlice = ai.getAndIncrement(); nSlice < nSlices; nSlice = ai.getAndIncrement()) {
int[] pixels_stereo = (int[]) stereo_stack.getPixels(nSlice+1);
int[] pixels_left = (int[]) imp_scenes_pair[0].getStack().getPixels(nSlice+1);
int[] pixels_right = (int[]) imp_scenes_pair[1].getStack().getPixels(nSlice+1);
for (int row = 0; row < stereo_height; row++) {
System.arraycopy(
pixels_left,
left_width * row,
pixels_stereo,
stereo_width * row,
left_width);
System.arraycopy(
pixels_right,
right_width * row,
pixels_stereo,
stereo_width * row + left_width + stereo_gap,
right_width);
// no_combine, stereo_2_images, stereo_anaglyth
ImagePlus imp_video = imp_scenes_pair[nstereo];
boolean [] combine_modes = {!combine_left_right, stereo_merge && combine_left_right, anaglyth_en && !toRGB && combine_left_right };
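// istereo_mode: 0 - output the single view as is, 1 - side-by-side stereo pair, 2 - anaglyph from the monochrome pair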
for (int istereo_mode = 0; istereo_mode < combine_modes.length; istereo_mode++) if(combine_modes[istereo_mode]) {
// if (combine_left_right) { // combine pairs multi-threaded
if (istereo_mode == 1) { // combine pairs for "Google" VR
final int left_width = imp_scenes_pair[0].getWidth();
final int right_width = imp_scenes_pair[1].getWidth();
final int stereo_width = left_width + right_width+stereo_gap;
final int stereo_height = imp_scenes_pair[0].getHeight();
final ImageStack stereo_stack = new ImageStack(stereo_width, stereo_height);
final int nSlices = imp_scenes_pair[0].getStack().getSize();
for (int i = 0; i < nSlices; i++) {
stereo_stack.addSlice(
imp_scenes_pair[0].getStack().getSliceLabel(i+1),
new int[stereo_width * stereo_height]);
}
if (generate_mapped) {
final Thread[] threads = ImageDtt.newThreadArray(QuadCLT.THREADS_MAX);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nSlice = ai.getAndIncrement(); nSlice < nSlices; nSlice = ai.getAndIncrement()) {
int[] pixels_stereo = (int[]) stereo_stack.getPixels(nSlice+1);
int[] pixels_left = (int[]) imp_scenes_pair[0].getStack().getPixels(nSlice+1);
int[] pixels_right = (int[]) imp_scenes_pair[1].getStack().getPixels(nSlice+1);
for (int row = 0; row < stereo_height; row++) {
System.arraycopy(
pixels_left,
left_width * row,
pixels_stereo,
stereo_width * row,
left_width);
System.arraycopy(
pixels_right,
right_width * row,
pixels_stereo,
stereo_width * row + left_width + stereo_gap,
right_width);
}
}
}
}
};
}
ImageDtt.startAndJoin(threads);
}
// convert stereo_stack to imp_scenes_pair[1], keeping calibration and fps?
imp_scenes_pair[1].setStack(stereo_stack);
String title = imp_scenes_pair[1].getTitle();
imp_scenes_pair[1].setTitle(title.replace("-RIGHT","-STEREO"));
}
String avi_path=null;
video:
{
try {
avi_path=quadCLTs[ref_index].saveAVIInModelDirectory(
!generate_mapped, // boolean dry_run,
null, // "GPU-SHIFTED-D"+clt_parameters.disparity, // String suffix,
mode_avi, // int avi_mode,
avi_JPEG_quality, // int avi_JPEG_quality,
video_fps, // double fps,
imp_scenes_pair[nstereo]); // ImagePlus imp)
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
break video;
}
// Convert with ffmpeg?
if (avi_path == null) {
break video;
}
int img_width=imp_scenes_pair[nstereo].getWidth();
int stereo_width = combine_left_right? img_width:0;
stereo_widths_list.add(stereo_width);
if (!run_ffmpeg) {
video_list.add(avi_path);
break video; // webm not requested
}
String webm_path = avi_path.substring(0, avi_path.length()-4)+video_ext;
// added -y so ffmpeg does not ask "overwrite y/n?"
String shellCommand = String.format("ffmpeg -y -i %s -c %s -b:v 0 -crf %d %s",
avi_path, video_codec, video_crf, webm_path);
Process p = null;
if (generate_mapped) {
int exit_code = -1;
};
}
ImageDtt.startAndJoin(threads);
}
imp_video = new ImagePlus();
imp_video.setImage(imp_scenes_pair[1]); // copy many attributes
imp_video.setStack(stereo_stack);
String title = imp_scenes_pair[1].getTitle();
imp_video.setTitle(title.replace("-RIGHT","-STEREO"));
// convert stereo_stack to imp_scenes_pair[1], keeping calibration and fps?
/// imp_scenes_pair[1].setStack(stereo_stack);
/// String title = imp_scenes_pair[1].getTitle();
/// imp_video = new ImagePlus(
/// imp_scenes_pair[1].getTitle().replace("-RIGHT","-STEREO"),
/// stereo_stack);
/// imp_scenes_pair[1].setTitle(title.replace("-RIGHT","-STEREO"));
} else if (istereo_mode == 2) { // combine anaglyph
// final Color anaglyph_left = clt_parameters.imp.anaglyph_left;
// final Color anaglyph_right = clt_parameters.imp.anaglyph_right;
final double [] left_rgb= {
anaglyph_left.getRed()/255.0,
anaglyph_left.getGreen()/255.0,
anaglyph_left.getBlue()/255.0};
final double [] right_rgb= {
anaglyph_right.getRed()/255.0,
anaglyph_right.getGreen()/255.0,
anaglyph_right.getBlue()/255.0};
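// tint the green channel of the left/right monochrome frames with the two anaglyph colors and add them, clipping at 255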
final int left_width = imp_scenes_pair[0].getWidth();
final int left_height = imp_scenes_pair[0].getHeight();
final int nSlices = imp_scenes_pair[0].getStack().getSize();
final ImageStack stereo_stack = new ImageStack(left_width, left_height);
for (int i = 0; i < nSlices; i++) {
stereo_stack.addSlice(
imp_scenes_pair[0].getStack().getSliceLabel(i+1),
new int[left_width * left_height]);
}
if (generate_mapped) {
final Thread[] threads = ImageDtt.newThreadArray(QuadCLT.THREADS_MAX);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
int [] rgb = new int [3];
for (int nSlice = ai.getAndIncrement(); nSlice < nSlices; nSlice = ai.getAndIncrement()) {
int[] pixels_stereo = (int[]) stereo_stack.getPixels(nSlice+1);
int[] pixels_left = (int[]) imp_scenes_pair[0].getStack().getPixels(nSlice+1);
int[] pixels_right = (int[]) imp_scenes_pair[1].getStack().getPixels(nSlice+1);
for (int pix = 0; pix < pixels_left.length; pix++) {
int gl = ((pixels_left[pix] & 0xff00) >> 8);
int gr = ((pixels_right[pix] & 0xff00) >> 8);
rgb[0] = ((int) Math.min(gl * left_rgb[0] + gr * right_rgb[0], 255)) & 0xff;
rgb[1] = ((int) Math.min(gl * left_rgb[1] + gr * right_rgb[1], 255)) & 0xff;
rgb[2] = ((int) Math.min(gl * left_rgb[2] + gr * right_rgb[2], 255)) & 0xff;
pixels_stereo[pix] = 0xff000000 + (rgb[0] << 16) + (rgb[1] << 8) + rgb[2];
}
}
}
};
}
ImageDtt.startAndJoin(threads);
}
imp_video = new ImagePlus();
imp_video.setImage(imp_scenes_pair[1]); // copy many attributes
imp_video.setStack(stereo_stack);
String title = imp_scenes_pair[1].getTitle();
imp_video.setTitle(title.replace("-RIGHT","-ANAGLYPH"));
/// String title = imp_scenes_pair[1].getTitle();
/// imp_scenes_pair[1].setTitle(title.replace("-RIGHT","-ANAGLYPH"));
} // if (istereo_mode == 1) {if (combine_left_right) { // combine pairs multi-threaded
String avi_path=null;
video:
{
try {
p = Runtime.getRuntime().exec(shellCommand);
avi_path=quadCLTs[ref_index].saveAVIInModelDirectory(
!generate_mapped, // boolean dry_run,
null, // "GPU-SHIFTED-D"+clt_parameters.disparity, // String suffix,
mode_avi, // int avi_mode,
avi_JPEG_quality, // int avi_JPEG_quality,
video_fps, // double fps,
imp_video); // imp_scenes_pair[nstereo]); // ImagePlus imp)
} catch (IOException e) {
System.out.println("Failed shell command: \""+shellCommand+"\"");
// TODO Auto-generated catch block
e.printStackTrace();
break video;
}
if (p != null) {
p.waitFor();
exit_code = p.exitValue();
// Convert with ffmpeg?
if (avi_path == null) {
break video;
}
System.out.println("Ran shell command: \""+shellCommand+"\" -> "+exit_code);
// Check if webm file exists
if ((exit_code != 0) || !(new File(webm_path)).exists()) {
System.out.println("Failed to create : \""+webm_path+"\"");
// int img_width=imp_scenes_pair[nstereo].getWidth();
int img_width=imp_video.getWidth();
int stereo_width = combine_left_right? img_width:0;
stereo_widths_list.add(stereo_width);
if (!run_ffmpeg) {
video_list.add(avi_path);
break video;
break video; // webm not requested
}
String webm_path = avi_path.substring(0, avi_path.length()-4)+video_ext;
// added -y so ffmpeg does not ask "overwrite y/n?"
String shellCommand = String.format("ffmpeg -y -i %s -c %s -b:v 0 -crf %d %s",
avi_path, video_codec, video_crf, webm_path);
Process p = null;
if (generate_mapped) {
int exit_code = -1;
try {
p = Runtime.getRuntime().exec(shellCommand);
} catch (IOException e) {
System.out.println("Failed shell command: \""+shellCommand+"\"");
}
if (p != null) {
p.waitFor();
exit_code = p.exitValue();
}
System.out.println("Ran shell command: \""+shellCommand+"\" -> "+exit_code);
// Check if webm file exists
if ((exit_code != 0) || !(new File(webm_path)).exists()) {
System.out.println("Failed to create : \""+webm_path+"\"");
video_list.add(avi_path);
break video;
}
} else {
System.out.println("Simulated shell command: \""+shellCommand);
}
video_list.add(webm_path);
if (remove_avi && generate_mapped) {
(new File(avi_path)).delete();
System.out.println("Deleted AVI video file: \""+avi_path+"\"");
}
} else {
System.out.println("Simulated shell command: \""+shellCommand);
}
video_list.add(webm_path);
if (remove_avi && generate_mapped) {
(new File(avi_path)).delete();
System.out.println("Deleted AVI video file: \""+avi_path+"\"");
}
}
}
} // for (int istereo_mode = 0; istereo_mode < combine_modes.length; istereo_mode++) if(combine_modes[istereo_mode]) {
} // if (gen_avi_mono_color[col_mode])
if (show_mono_color[col_mode] && generate_mapped) {
imp_scenes_pair[nstereo].show();
}
......@@ -4874,46 +4950,72 @@ public class OpticalFlow {
imp_constant_mono.show();
}
}
ImagePlus imp_fg = QuadCLT.renderGPUFromDSI(
-1, // final int sensor_mask,
false, // final boolean merge_channels,
null, // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
clt_parameters, // CLTParameters clt_parameters,
fg_disparity, // double [] disparity_ref,
ZERO3, // final double [] scene_xyz, // camera center in world coordinates
ZERO3, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[ref_index], // final QuadCLT scene,
quadCLTs[ref_index], // final QuadCLT ref_scene, // now - may be null - for testing if scene is rotated ref
true, // toRGB, // final boolean toRGB,
"GPU-SHIFTED-FOREGROUND", // String suffix,
threadsMax, // int threadsMax,
debugLevel); // int debugLevel)
quadCLTs[ref_index].saveImagePlusInModelDirectory(
null, // "GPU-SHIFTED-FOREGROUND", // String suffix,
imp_fg); // ImagePlus imp)
ImagePlus imp_fg_mono = QuadCLT.renderGPUFromDSI(
-1, // final int sensor_mask,
false, // final boolean merge_channels,
null, // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
clt_parameters, // CLTParameters clt_parameters,
fg_disparity, // double [] disparity_ref,
ZERO3, // final double [] scene_xyz, // camera center in world coordinates
ZERO3, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[ref_index], // final QuadCLT scene,
quadCLTs[ref_index], // final QuadCLT ref_scene, // now - may be null - for testing if scene is rotated ref
false, // toRGB, // final boolean toRGB,
"GPU-SHIFTED-FOREGROUND", // String suffix,
threadsMax, // int threadsMax,
debugLevel); // int debugLevel)
quadCLTs[ref_index].saveImagePlusInModelDirectory(
null, // "GPU-SHIFTED-FOREGROUND", // String suffix,
imp_fg_mono); // ImagePlus imp)
if (show_images && show_images_bgfg) {
imp_fg.show();
if (show_images_mono) {
imp_fg_mono.show();
boolean offset_fg_image = true; // config later, generate FG image for all stereo views
double [][] img_views = offset_fg_image ? stereo_views : (new double [][] {{0,0,0}});
for (int ibase = 0; ibase < img_views.length; ibase++) if (!offset_fg_image || generate_stereo_var[ibase]) {
double stereo_baseline_meters = 0.001 * img_views[ibase][0];
double view_height_meters = 0.001 * img_views[ibase][1];
double view_back_meters = 0.001 * img_views[ibase][2];
double [] xyz_offset = {
-stereo_baseline_meters, // x offset
-view_height_meters, // Y offset
-view_back_meters}; // Z offset
String scenes_suffix = "";
if (img_views[ibase][0] != 0) {
scenes_suffix += "-B"+img_views[ibase][0];
}
if (img_views[ibase][1] != 0) {
scenes_suffix += "-Y"+img_views[ibase][1];
}
if (img_views[ibase][2] != 0) {
scenes_suffix += "-Z"+img_views[ibase][2];
}
ImagePlus imp_fg = QuadCLT.renderGPUFromDSI(
-1, // final int sensor_mask,
false, // final boolean merge_channels,
null, // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
clt_parameters, // CLTParameters clt_parameters,
fg_disparity, // double [] disparity_ref,
xyz_offset, // ZERO3, // final double [] scene_xyz, // camera center in world coordinates
ZERO3, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[ref_index], // final QuadCLT scene,
quadCLTs[ref_index], // final QuadCLT ref_scene, // now - may be null - for testing if scene is rotated ref
true, // toRGB, // final boolean toRGB,
scenes_suffix+"GPU-SHIFTED-FOREGROUND", // String suffix,
threadsMax, // int threadsMax,
debugLevel); // int debugLevel)
quadCLTs[ref_index].saveImagePlusInModelDirectory(
null, // "GPU-SHIFTED-FOREGROUND", // String suffix,
imp_fg); // ImagePlus imp)
ImagePlus imp_fg_mono = QuadCLT.renderGPUFromDSI(
-1, // final int sensor_mask,
false, // final boolean merge_channels,
null, // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
clt_parameters, // CLTParameters clt_parameters,
fg_disparity, // double [] disparity_ref,
xyz_offset, // ZERO3, // final double [] scene_xyz, // camera center in world coordinates
ZERO3, // final double [] scene_atr, // camera orientation relative to world frame
quadCLTs[ref_index], // final QuadCLT scene,
quadCLTs[ref_index], // final QuadCLT ref_scene, // now - may be null - for testing if scene is rotated ref
false, // toRGB, // final boolean toRGB,
scenes_suffix+"GPU-SHIFTED-FOREGROUND", // String suffix,
threadsMax, // int threadsMax,
debugLevel); // int debugLevel)
quadCLTs[ref_index].saveImagePlusInModelDirectory(
null, // "GPU-SHIFTED-FOREGROUND", // String suffix,
imp_fg_mono); // ImagePlus imp)
if (show_images && show_images_bgfg) {
imp_fg.show();
if (show_images_mono) {
imp_fg_mono.show();
}
}
}
ImagePlus imp_bg = QuadCLT.renderGPUFromDSI(
-1, // final int sensor_mask,
false, // final boolean merge_channels,
......@@ -5019,7 +5121,6 @@ public class OpticalFlow {
combo_dsn_final, // double [][] combo_dsn_final, // dls,
quadCLTs[ref_index], // QuadCLT scene,
debugLevel); // int debugLevel);// > 0
// FIXME: Adjust for incomplete series!!!!!
intersceneMlExport(
clt_parameters, // CLTParameters clt_parameters,
ers_reference, // ErsCorrection ers_reference,
......@@ -5030,8 +5131,6 @@ public class OpticalFlow {
}
// ArrayList<String> video_list = new ArrayList<String>();
if (videos != null) {
videos[0] = video_list.toArray(new String[0]);
}
......@@ -5041,21 +5140,20 @@ public class OpticalFlow {
stereo_widths[0][i] = stereo_widths_list.get(i);
}
}
if (start_ref_pointers != null) {
start_ref_pointers[0] = earliest_scene;
}
System.out.println("buildSeries(): DONE"); //
return quadCLTs[ref_index].getX3dTopDirectory();
// return true;
}
public void testERS(
CLTParameters clt_parameters,
int indx0, // reference scene in a pair
int indx1, // other scene in a pair
// double [] ref_disparity,
QuadCLT [] quadCLTs,
int debugLevel) {
// First create a pair of images, similar to renderSceneSequence()
// boolean toRGB = true;
boolean show_color = clt_parameters.imp.show_mapped_color;
boolean show_mono = clt_parameters.imp.show_mapped_mono;
boolean use_combo_dsi = clt_parameters.imp.use_combo_dsi;
......@@ -5083,7 +5181,6 @@ public class OpticalFlow {
quadCLTs[ref_index], // QuadCLT scene,
debugLevel);
double [] disparity_fg = ds[0]; // combo_dsn_final[COMBO_DSN_INDX_DISP_FG];
// double d
double [] interscene_ref_disparity = null; // keep null to use old single-scene disparity for interscene matching
if (use_combo_dsi) {
interscene_ref_disparity = ds[0].clone(); // use_lma_dsi ?
......@@ -5096,7 +5193,6 @@ public class OpticalFlow {
}
}
// QuadCLT [] other_ref = {quadCLTs[indx1],quadCLTs[indx0]};
int [] other_ref = {indx1, indx0};
ErsCorrection ers_reference = quadCLTs[ref_index].getErsCorrection();
ImageStack stack_scenes_color = null;
......@@ -5428,14 +5524,23 @@ public class OpticalFlow {
String ts = quadCLTs[nscene].getImageName();
double [] scene_xyz = ZERO3;
double [] scene_atr = ZERO3;
if ((nscene != ref_index) && (mode3d >= 0)) {
// if ((nscene != ref_index) && (mode3d >= 0)) {
if (nscene != ref_index) { // Check even for raw, so video frames will match in all modes
scene_xyz = ers_reference.getSceneXYZ(ts);
scene_atr = ers_reference.getSceneATR(ts);
double [] scene_ers_xyz_dt = ers_reference.getSceneErsXYZ_dt(ts);
double [] scene_ers_atr_dt = ers_reference.getSceneErsATR_dt(ts);
quadCLTs[nscene].getErsCorrection().setErsDt(
scene_ers_xyz_dt, // double [] ers_xyz_dt,
scene_ers_atr_dt); // double [] ers_atr_dt)(ers_scene_original_xyz_dt);
if ((scene_atr==null) || (scene_xyz == null)) {
continue;
}
if (mode3d >= 0) {
double [] scene_ers_xyz_dt = ers_reference.getSceneErsXYZ_dt(ts);
double [] scene_ers_atr_dt = ers_reference.getSceneErsATR_dt(ts);
quadCLTs[nscene].getErsCorrection().setErsDt(
scene_ers_xyz_dt, // double [] ers_xyz_dt,
scene_ers_atr_dt); // double [] ers_atr_dt)(ers_scene_original_xyz_dt);
} else { // ugly, restore for raw mode that should not be rotated/shifted
scene_xyz = ZERO3;
scene_atr = ZERO3;
}
}
if (stereo_xyz != null) { // offset all, including reference scene
double [][] combo_xyzatr = ErsCorrection.combineXYZATR(
......@@ -9533,6 +9638,21 @@ public double[][] correlateIntersceneDebug( // only uses GPU and quad
return disparity_map; // disparity_map
}
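// Return the index of the earliest scene that (together with all later ones) has both XYZ and ATR poses relative to the reference (last) scene.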
public static int getEarliestScene(QuadCLT [] scenes)
{
int ref_index = scenes.length - 1;
ErsCorrection ers_reference = scenes[ref_index].getErsCorrection();
for (int nscene = ref_index-1; nscene >= 0; nscene--) {
String ts = scenes[nscene].getImageName();
double [] scene_xyz = ers_reference.getSceneXYZ(ts);
double [] scene_atr = ers_reference.getSceneATR(ts);
if ((scene_xyz == null) || (scene_atr == null)){
return nscene + 1; // scene is not matched
}
}
return 0;
}
// Cleaned up and optimized version to reduce memory usage (on-the-fly integration, not saving full correlation data)
public double[][] correlateInterscene(
final CLTParameters clt_parameters,
......
......@@ -613,7 +613,7 @@ public class QuadCLT extends QuadCLTCPU {
break;
}
fill_all[0] = anum_gaps.get() == 0; // no new tiles filled
if (npass == (num_passes-1)){
if ((debug_level>0) && (npass == (num_passes-1))){
System.out.println("fillDisparityStrength() LAST PASS ! npass="+npass+", change="+Math.sqrt(amax_diff.get())+" ("+max_change+")");
System.out.println("fillDisparityStrength() LAST PASS ! npass="+npass+", change="+Math.sqrt(amax_diff.get())+" ("+max_change+")");
System.out.println("fillDisparityStrength() LAST PASS ! npass="+npass+", change="+Math.sqrt(amax_diff.get())+" ("+max_change+")");
......@@ -2198,6 +2198,27 @@ public class QuadCLT extends QuadCLTCPU {
if (full_woi_in != null) {
rendered_width = full_woi_in.width * GPUTileProcessor.DTT_SIZE;
}
boolean showPxPyD = false;
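// debug only: when enabled, show per-tile pX, pY and disparity as a 3-slice image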
if (showPxPyD) {
int dbg_width = rendered_width/GPUTileProcessor.DTT_SIZE;
int dbg_height = pXpYD.length/dbg_width;
double [][] dbg_img = new double [3][pXpYD.length];
for (int i = 0; i < dbg_img.length; i++) {
Arrays.fill(dbg_img[i], Double.NaN);
}
for (int nTile = 0; nTile < pXpYD.length; nTile++) if (pXpYD[nTile] != null){
for (int i = 0; i < dbg_img.length; i++) {
dbg_img[i][nTile] = pXpYD[nTile][i];
}
}
(new ShowDoubleFloatArrays()).showArrays( // out of boundary 15
dbg_img,
dbg_width,
dbg_height,
true,
"pXpYD",
new String[] {"pX","pY","Disparity"});
}
//scene_QuadClt.getTileProcessor().getTileSize();
TpTask[] tp_tasks_ref = GpuQuad.setInterTasks( // "true" reference, with stereo actual reference will be offset
scene.getNumSensors(),
......
......@@ -540,7 +540,7 @@ public class QuadCLTCPU {
}
Properties inter_properties = new Properties();
String prefix = is_aux?PREFIX_AUX:PREFIX;
setProperties(prefix,inter_properties);
setProperties(prefix,inter_properties); // null pointer
OutputStream os;
try {
os = new FileOutputStream(path);
......@@ -1634,7 +1634,7 @@ public class QuadCLTCPU {
ErsCorrection ers = (ErsCorrection) gc;
ers.setPropertiesPose(prefix, properties);
ers.setPropertiesERS(prefix, properties);
ers.setPropertiesScenes(prefix, properties);
ers.setPropertiesScenes(prefix, properties); // null pointer
ers.setPropertiesLineTime(prefix, properties);
}
properties.setProperty(prefix+"num_orient", this.num_orient+"");
......
......@@ -8563,10 +8563,10 @@ if (debugLevel > -100) return true; // temporarily !
* @param debugLevel
* @throws Exception
*/
public void buildSeriesTQ(
public static void buildSeriesTQ(
QuadCLT quadCLT_main, // tiles should be set
int ref_index, // -1 - last
int ref_step,
int ref_index_unused, // -1 - last
int ref_step_unused, // not used here
CLTParameters clt_parameters,
EyesisCorrectionParameters.DebayerParameters debayerParameters,
ColorProcParameters colorProcParameters,
......@@ -8583,9 +8583,12 @@ if (debugLevel > -100) return true; // temporarily !
double stereo_intereye = clt_parameters.imp.stereo_intereye;
double stereo_phone_width = clt_parameters.imp.stereo_phone_width; // 0 - no padding
boolean stereo_pad = (stereo_intereye > 0) && (stereo_phone_width > 0);
int video_crf = clt_parameters.imp.video_crf;
String video_codec = clt_parameters.imp.video_codec.toLowerCase();
int video_crf_combo = clt_parameters.imp.video_crf_combo;
String video_codec_combo = clt_parameters.imp.video_codec_combo.toLowerCase();
int min_num_scenes = clt_parameters.imp.min_num_scenes; // abandon series if there are less than this number of scenes in it
if (min_num_scenes < 1) {
min_num_scenes = 1;
}
long start_time_all = System.nanoTime();
OpticalFlow opticalFlow = new OpticalFlow(
......@@ -8603,8 +8606,33 @@ if (debugLevel > -100) return true; // temporarily !
num_seq = pathFirstLast.length;
}
}
String [][] video_lists = new String [num_seq][];
int [][] stereo_widths = new int [num_seq][];
class VideoSet {
String [] video_paths;
int [] stereo_widths;
int earliest_scene, reference_scene;
VideoSet(
String [] paths,
int [] stereo_widths,
int earliest_scene,
int reference_scene) {
this.video_paths = paths;
this.stereo_widths = stereo_widths;
this.earliest_scene = earliest_scene;
this.reference_scene = reference_scene;
}
String [] getVideoPaths() {return video_paths;}
int [] getStereoWidths() {return stereo_widths;}
}
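// Editorial note: each pass of opticalFlow.buildSeries() below contributes one VideoSet
// (video paths, per-video stereo widths, earliest and reference scene indices); the list is
// later sorted by the first video path, whose leading timestamp prefix keeps the segments in
// time order for concatenation.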
ArrayList<VideoSet> video_sets_list = new ArrayList<VideoSet>();
// String [][] video_lists = new String [num_seq][];
// int [] earliest_scene_pointer = new int[1];
// int [][] stereo_widths = new int [num_seq][];
for (int nseq = 0; nseq < num_seq; nseq++) {
long start_time_seq = System.nanoTime();
System.out.println("\nSTARTED PROCESSING SCENE SEQUENCE "+nseq+" (last is "+(num_seq-1)+")");
......@@ -8615,61 +8643,91 @@ if (debugLevel > -100) return true; // temporarily !
pathFirstLast[nseq].first, // int scene_first, // first scene to process
pathFirstLast[nseq].last); // int scene_last); // last scene to process (negative - add length
}
String [][] video_list = new String[1][];
int [][] widths_list = new int [1][];
String model_directory = opticalFlow.buildSeries(
(pathFirstLast != null), //boolean batch_mode,
quadCLT_main, // QuadCLT quadCLT_main, // tiles should be set
ref_index, // int ref_index, // -1 - last
ref_step, // int ref_step,
clt_parameters, // CLTParameters clt_parameters,
debayerParameters, // EyesisCorrectionParameters.DebayerParameters debayerParameters,
colorProcParameters, // ColorProcParameters colorProcParameters,
channelGainParameters, // CorrectionColorProc.ColorGainsParameters channelGainParameters,
rgbParameters, // EyesisCorrectionParameters.RGBParameters rgbParameters,
equirectangularParameters, // EyesisCorrectionParameters.EquirectangularParameters equirectangularParameters,
properties, // Properties properties,
reset_from_extrinsics, // boolean reset_from_extrinsics,
video_list, // String [][] video_list, // null or list of generated avi or webm paths
widths_list,
threadsMax, // final int threadsMax, // maximal number of threads to launch
updateStatus, // final boolean updateStatus,
debugLevel+2); // final int debugLevel)
video_lists[nseq] = video_list[0];
stereo_widths[nseq] = widths_list[0];
System.out.println("PROCESSING SCENE SEQUENCE "+nseq+" (last is "+(num_seq-1)+") is FINISHED in "+
IJ.d2s(0.000000001*(System.nanoTime()-start_time_seq),3)+" sec ("+
IJ.d2s(0.000000001*(System.nanoTime()-start_time_all),3)+" sec from the overall start");
// will open dialog if does not exist
String linkedModelsDirectory = quadCLT_main.correctionsParameters.selectLinkedModelsDirectory(true,true);
if ((linkedModelsDirectory != null) && (linkedModelsDirectory.length() > 0) && (model_directory != null)) {
Path pathAbsolute = Paths.get(model_directory);
Path pathBase = Paths.get(linkedModelsDirectory);
Path pathRelative = pathBase.relativize(pathAbsolute);
File linkDir = new File(linkedModelsDirectory);
linkDir.mkdirs();
File link = new File(linkDir, pathAbsolute.getFileName().toString());
if (link.exists()) {
link.delete();
}
Files.createSymbolicLink(link.toPath(), pathRelative);
}
int ref_index = -1; // -1 - last
int [] start_ref_pointers = new int[2];
while ((ref_index < 0) || ((ref_index + 1) >= min_num_scenes)) {
String model_directory = opticalFlow.buildSeries(
(pathFirstLast != null), //boolean batch_mode,
quadCLT_main, // QuadCLT quadCLT_main, // tiles should be set
ref_index, // int ref_index, // -1 - last
clt_parameters, // CLTParameters clt_parameters,
debayerParameters, // EyesisCorrectionParameters.DebayerParameters debayerParameters,
colorProcParameters, // ColorProcParameters colorProcParameters,
channelGainParameters, // CorrectionColorProc.ColorGainsParameters channelGainParameters,
rgbParameters, // EyesisCorrectionParameters.RGBParameters rgbParameters,
equirectangularParameters, // EyesisCorrectionParameters.EquirectangularParameters equirectangularParameters,
properties, // Properties properties,
reset_from_extrinsics, // boolean reset_from_extrinsics,
video_list, // String [][] video_list, // null or list of generated avi or webm paths
widths_list,
start_ref_pointers, // int [] start_ref_pointers,
threadsMax, // final int threadsMax, // maximal number of threads to launch
updateStatus, // final boolean updateStatus,
debugLevel+2); // final int debugLevel)
if (model_directory == null) {
System.out.println("Failed to build sequence for series "+ref_index);
break; // and go to the next scene sequence from the list
}
video_sets_list.add(new VideoSet(
video_list[0], // String [] paths,
widths_list[0], // int [] stereo_widths,
start_ref_pointers[0], // int earliest_scene,
start_ref_pointers[1])); // int reference_scene);
String series_action = (start_ref_pointers[0] < (min_num_scenes-1))?"is FINISHED ":("will continue down from scene "+(start_ref_pointers[0]));
System.out.println("PROCESSING SCENE SEQUENCE "+nseq+" (last is "+(num_seq-1)+") "+series_action+" in "+
IJ.d2s(0.000000001*(System.nanoTime()-start_time_seq),3)+" sec ("+
IJ.d2s(0.000000001*(System.nanoTime()-start_time_all),3)+" sec from the overall start");
// will open dialog if does not exist
String linkedModelsDirectory = quadCLT_main.correctionsParameters.selectLinkedModelsDirectory(true,true);
if ((linkedModelsDirectory != null) && (linkedModelsDirectory.length() > 0)) {
Path pathAbsolute = Paths.get(model_directory);
Path pathBase = Paths.get(linkedModelsDirectory);
Path pathRelative = pathBase.relativize(pathAbsolute);
File linkDir = new File(linkedModelsDirectory);
linkDir.mkdirs();
File link = new File(linkDir, pathAbsolute.getFileName().toString());
if (link.exists()) {
link.delete();
}
Files.createSymbolicLink(link.toPath(), pathRelative);
}
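// Editorial note: the block above refreshes a relative symbolic link to the new model
// directory inside linkedModelsDirectory, deleting any stale link with the same name first.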
if (start_ref_pointers[0] < (min_num_scenes-1)) {
break;
}
ref_index = start_ref_pointers[0]; // continue down from the scene already attached to the previous reference
}
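// Editorial note: the loop above walks a long recording backwards: each buildSeries() pass
// reports its earliest matched scene in start_ref_pointers[0]; if at least min_num_scenes
// scenes remain at or below that index, it becomes the reference (ref_index) of the next,
// earlier series, otherwise the sequence is finished.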
}
// combine videos if generated
if ((video_lists.length > 1) && (video_lists[0] != null) && (video_lists[0].length > 1)) { // do not combine if single sequence or no videos
if ((video_sets_list.size() > 1) &&
(video_sets_list.get(0).getVideoPaths() != null) &&
(video_sets_list.get(0).getVideoPaths().length > 0)) {
// need to sort video_sets_list first!
Collections.sort(video_sets_list, new Comparator<VideoSet>() {
@Override
public int compare(VideoSet lhs, VideoSet rhs) {
// -1 - less than, 1 - greater than, 0 - equal; ascending by first video path (timestamp order)
return lhs.getVideoPaths()[0].compareTo(rhs.getVideoPaths()[0]);
}
});
// if ((video_lists.length > 1) && (video_lists[0] != null) && (video_lists[0].length > 1)) { // do not combine if single sequence or no videos
concat_videos: {
System.out.println("Generating "+(video_lists[0].length)+" combined video files.");
System.out.println("Generating "+(video_sets_list.get(0).getVideoPaths().length)+" combined video files.");
String videoDirectory = quadCLT_main.correctionsParameters.selectVideoDirectory(true,true);
if (videoDirectory == null) {
break concat_videos;
}
File video_dir = new File (videoDirectory);
video_dir.mkdirs(); // Should already exist actually
for (int nvideo = 0; nvideo < video_lists[0].length; nvideo++) {
for (int nvideo = 0; nvideo < video_sets_list.get(0).getVideoPaths().length; nvideo++) {
// get name with <ts_sec_first>-<ts_sec_last>
// String spath0 = video_lists[0][nvideo];
String name0 = Paths.get(video_lists[0][nvideo]).getFileName().toString();
String name1 = Paths.get(video_lists[video_lists.length-1][nvideo]).getFileName().toString();
String name0 = Paths.get(video_sets_list.get(0).getVideoPaths()[nvideo]).getFileName().toString();
String name1 = Paths.get(video_sets_list.get(video_sets_list.size()-1).getVideoPaths()[nvideo]).getFileName().toString();
String ts_sec0=name0.substring(0,name0.indexOf("_")); // seconds of the first timestamp
String ts_sec1=name1.substring(0,name1.indexOf("_")); // seconds of the last timestamp
String suffix0 = name0.substring(name0.indexOf("-")); // Skip timestamp
......@@ -8684,16 +8742,16 @@ if (debugLevel > -100) return true; // temporarily !
PrintWriter writer = new PrintWriter(list_to_concat, "UTF-8");
int this_stereo_width = 0;
int num_segments=0;
for (int i = 0; i <video_lists.length; i++) {
if ((video_lists[i] != null) && (video_lists[i].length > nvideo)) {
if ((new File(video_lists[i][nvideo])).exists()) {
writer.println("file '"+video_lists[i][nvideo]+"'");
for (int i = 0; i < video_sets_list.size(); i++) {
if ((video_sets_list.get(i).getVideoPaths() != null) && (video_sets_list.get(i).getVideoPaths().length > nvideo)) {
if ((new File(video_sets_list.get(i).getVideoPaths()[nvideo])).exists()) {
writer.println("file '"+video_sets_list.get(i).getVideoPaths()[nvideo]+"'");
if (stereo_pad) {
this_stereo_width = stereo_widths[i][nvideo];
this_stereo_width = video_sets_list.get(i).getStereoWidths()[nvideo];
}
num_segments++;
} else {
System.out.println("Missing video segment: "+video_lists[i][nvideo]);
System.out.println("Missing video segment: "+video_sets_list.get(i).getVideoPaths()[nvideo]);
}
} else {
System.out.println("Specific video segment "+i+":"+nvideo+" is missing, skipping");
......@@ -8720,10 +8778,10 @@ if (debugLevel > -100) return true; // temporarily !
int padded_width = 16 * ((int) Math.round((this_stereo_width + stereo_gap) * stereo_phone_width / stereo_intereye / 32));
shellCommand = String.format(
"ffmpeg -y -f concat -safe 0 -i %s -r 60 -vf pad=width=%d:height=0:x=-1:y=-1:color=black,setpts=%f*PTS -b:v 0 -crf %d -c %s %s",
list_to_concat.toString(), padded_width, pts_scale, video_crf, video_codec, video_out.toString());
list_to_concat.toString(), padded_width, pts_scale, video_crf_combo, video_codec_combo, video_out.toString());
} else {
shellCommand = String.format("ffmpeg -y -f concat -safe 0 -i %s -r 60 -vf setpts=%f*PTS -b:v 0 -crf %d -c %s %s",
list_to_concat.toString(), pts_scale, video_crf, video_codec, video_out.toString());
list_to_concat.toString(), pts_scale, video_crf_combo, video_codec_combo, video_out.toString());
}
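// Example of the resulting command (illustrative only; pts_scale, file names and the assumed
// values video_crf_combo=40, video_codec_combo="vp8" are not fixed by this commit), for the
// non-padded branch:
//   ffmpeg -y -f concat -safe 0 -i <list_to_concat> -r 60 -vf setpts=0.250000*PTS -b:v 0 -crf 40 -c vp8 <video_out>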
Process p = null;
......