Elphel / imagej-elphel / Commits

Commit ee4732b5
authored Jul 08, 2022 by Andrey Filippov

    Before fixing renderGPUFromDSI with offset viewpoint

parent 90aa5914

Showing 7 changed files with 799 additions and 313 deletions (+799 / -313)
Eyesis_Correction.java           .../java/com/elphel/imagej/correction/Eyesis_Correction.java    +1    -1
ErsCorrection.java               ...n/java/com/elphel/imagej/tileprocessor/ErsCorrection.java    +7    -5
IntersceneMatchParameters.java   ...lphel/imagej/tileprocessor/IntersceneMatchParameters.java    +316  -31
OpticalFlow.java                 ...ain/java/com/elphel/imagej/tileprocessor/OpticalFlow.java    +335  -215
QuadCLT.java                     src/main/java/com/elphel/imagej/tileprocessor/QuadCLT.java      +22   -1
QuadCLTCPU.java                  ...main/java/com/elphel/imagej/tileprocessor/QuadCLTCPU.java    +2    -2
TwoQuadCLT.java                  ...main/java/com/elphel/imagej/tileprocessor/TwoQuadCLT.java    +116  -58
src/main/java/com/elphel/imagej/correction/Eyesis_Correction.java

@@ -6818,7 +6818,7 @@ public class Eyesis_Correction implements PlugIn, ActionListener {
 		CLT_PARAMETERS.setRGBParameters(RGB_PARAMETERS);
 		try {
-			TWO_QUAD_CLT.buildSeriesTQ(
+			TwoQuadCLT.buildSeriesTQ(
 					quadCLT, // QUAD_CLT, // QuadCLT quadCLT_main,
 					-1,      // int ref_index,
 					0,       // int ref_step,
src/main/java/com/elphel/imagej/tileprocessor/ErsCorrection.java

@@ -317,11 +317,13 @@ public class ErsCorrection extends GeometryCorrection {
 	public void setPropertiesScenes(String prefix, Properties properties){
 		String [] timestamps = getScenes();
 		for (String k : timestamps) {
-			String [] s_scenes = getScene(k).toStrings();
+			if (getScene(k) != null) {
+				String [] s_scenes = getScene(k).toStrings(); // null pointer
 			properties.setProperty(prefix+SCENES_PREFIX+"_"+k,        s_scenes[0]);
 			properties.setProperty(prefix+SCENES_PREFIX+"_"+k+"_dt",  s_scenes[1]);
 			properties.setProperty(prefix+SCENES_PREFIX+"_"+k+"_d2t", s_scenes[2]);
 //			properties.setProperty(prefix+SCENES_PREFIX+"_"+k, getScene(k).toString());
+			}
 		}
 	}
src/main/java/com/elphel/imagej/tileprocessor/IntersceneMatchParameters.java

@@ -27,6 +27,7 @@ package com.elphel.imagej.tileprocessor;
 import java.awt.Color;
 import java.io.IOException;
 import java.util.Properties;
+import java.util.StringTokenizer;
 import com.elphel.imagej.common.GenericJTabbedDialog;

@@ -57,9 +58,15 @@ public class IntersceneMatchParameters {
 	public boolean    show_color_nan =       true;  // use NAN background for color images (sharp, but distinct black)
 	public boolean    show_mono_nan =        false; // use NAN background for monochrome images (sharp, but distinct black)
-	public double []   stereo_bases =        {0.0, 200.0, 500.0, 1000.0};
-	public boolean []  generate_stereo_var = new boolean [stereo_bases.length];
+//	public double []   stereo_bases =        {0.0, 200.0, 500.0, 1000.0};
+	public double [][] stereo_views =        { // base, up, back
+			{   0.0,   0.0,    0.0},
+			{ 200.0,   0.0,    0.0},
+			{ 500.0,   0.0, 2000.0},
+			{1000.0, 500.0, 3000.0}};
+//	public boolean []  generate_stereo_var = new boolean [stereo_bases.length];
+	public boolean []  generate_stereo_var = new boolean [stereo_views.length];
 // Other parameters
 	public int        min_num_scenes =       10;    // abandon series if there are less than this number of scenes in it

@@ -94,19 +101,25 @@ public class IntersceneMatchParameters {
 	public boolean    run_ffmpeg =           true;  // only after AVI
 	public String     video_ext =            ".webm";
 	public String     video_codec =          "vp8";
 //	public String     video_extra =          "-b:v 0 -crf 40"; // extra FFMPEG parameters
 	public int        video_crf =            40;    // lower - better, larger file size
 	public boolean    remove_avi =           true;  // remove avi after conversion to webm
+	public String     video_codec_combo =    "vp8"; // applies when combining videos
+	public int        video_crf_combo =      40;    // lower - better, larger file size applies when combining videos
 	public boolean    um_mono =              true;  // applies to both TIFF and AVI
 	public double     um_sigma =             10;
 	public double     um_weight =            0.97;  //
 	public boolean    mono_fixed =           true;  // normalize to fixed range when converting to 8 bits
 	public double     mono_range =           500.0; // monochrome full-scale range (+/- half)
+	public boolean    anaglyth_en =          true;  // applies to both TIFF and AVI
+	public static Color anaglyph_left_default =  new Color (255,   0,   0); // red
+	public static Color anaglyph_right_default = new Color (  0, 255, 255); // cyan
+	public Color      anaglyph_left =        anaglyph_left_default;
+	public Color      anaglyph_right =       anaglyph_right_default; // cyan
 	public boolean    annotate_color =       true;  // annotate pseudo-color video frames with timestamps
 	public boolean    annotate_mono =        true;  // annotate monochrome video frames with timestamps
-	public Color      annotate_color_color = new Color (255, 255, 255); // greenish over "fire"
-	public Color      annotate_color_mono =  new Color (255, 255, 255); // reddish over grey
+//	public Color      annotate_color_mono =  new Color (255, 180, 100); // reddish over grey
+	public Color      annotate_color_color = new Color (255, 255, 255); // greenish over "fire"
+	public Color      annotate_color_mono =  new Color (255, 180,  50); // reddish over grey
 	public boolean    annotate_transparent_mono = false; // // black if not transparent

@@ -237,6 +250,7 @@ public class IntersceneMatchParameters {
 	public int movDebugLevel() {
 		return (debug_level > -1) ? mov_debug_level : 0;}

 	public IntersceneMatchParameters() {
 	}

@@ -283,7 +297,7 @@ public class IntersceneMatchParameters {
 				"Correct in 3D scene images (from all 16 sensors) matching reference (last in sequence) scene with background (BG) priority.");
 		gd.addCheckbox ("Generate binocular stereo pairs", this.generate_stereo,
 				"Generate stereo-pairs for 3D-corrected videos (FG,BG). Ebables specific modes (including 0-baseline / mono).");
+/*
 		for (int i = 0; i < stereo_bases.length; i++) {
 			double base = stereo_bases[i];
 			String title = (base == 0.0)?

@@ -294,6 +308,26 @@ public class IntersceneMatchParameters {
 					"Generate "+base+"mm-baseline stereo scene sequences as Tiff and/or video.";
 			gd.addCheckbox (title, this.generate_stereo_var[i], tooltip);
 		}
+*/
+		for (int i = 0; i < stereo_views.length; i++) {
+//			String stereo_view = doublesToString(stereo_views[i]);
+			double base = stereo_views[i][0]; // stereo_bases[i];
+			String ub = String.format("(%.0fmm up, %.0fmm back) ", stereo_views[i][1], stereo_views[i][2]);
+			if ((stereo_views[i][1]==0) && (stereo_views[i][2]==0)){
+				ub = "";
+			}
+			String title = (base == 0.0)?
+					"Generate mono (single camera) scene sequences"+ub:
+					"Generate "+base+"mm-baseline stereo scene sequences"+ub;
+			String tooltip = (base == 0.0)?
+					"Generate mono (single camera) scene sequences "+ub+"as Tiff and/or video.":
+					"Generate "+base+"mm-baseline stereo scene sequences "+ub+"as Tiff and/or video.";
+			gd.addCheckbox (title, this.generate_stereo_var[i], tooltip);
+		}
 		gd.addMessage  ("Generate/save reference (last) scene images");
 		gd.addCheckbox ("Export all-sensor images", this.export_images,

@@ -403,6 +437,7 @@ public class IntersceneMatchParameters {
 				"Weaker are removed unconditionally (not used now).");
 		gd.addMessage  ("Stereo");
+/*
 		if (stereo_bases.length > 0) {
 			String [] stereo_choices = new String [stereo_bases.length + 1];
 			stereo_choices[0] = "--none--";

@@ -412,8 +447,20 @@ public class IntersceneMatchParameters {
 			gd. addChoice("Remove stereo-base", stereo_choices, stereo_choices[0],
 					"Remove selected stereo-base");
 		}
 		gd.addStringField ("Add another baseline (mm)", "",
 				"Add another stereo baseline (mm).");
+*/
+		if (stereo_views.length > 0) {
+			String [] stereo_choices = new String [stereo_views.length + 1];
+			stereo_choices[0] = "--none--";
+			for (int i = 0; i < stereo_views.length; i++) {
+				stereo_choices[i+1] = doublesToString(stereo_views[i])+" mm";
+			}
+			gd. addChoice("Remove stereo-view (base, up, back)", stereo_choices, stereo_choices[0],
+					"Remove selected stereo-view, consisting of streo-base, viewpoint above camera, viewpoint behing camera - all in mm");
+		}
+		gd.addStringField ("Add another stereo view (baseline, above, behind)", "", 40,
+				"Add another stereo view by providing baseline, above camera, behind camera (mm).");
 //		gd.addNumericField("Stereo baseline", this.stereo_baseline, 5,7,"mm",
 //				"Synthetic 3D with possibly exagerrated stereo baseline");

@@ -459,16 +506,17 @@ public class IntersceneMatchParameters {
 				"Converted video extension, starting with dot.");
 		gd.addStringField ("Video encoder", this.video_codec, 60,
 				"FFMPEG video encoder, such as \"VP8\" or \"VP9\".");
 //		gd.addStringField ("Video extra parameters", this.video_extra, 60,
 //				"FFMPEG video encoder additional parameters, such as, such as \"-b:v 0 -crf 40\".");
 		gd.addNumericField("Video CRF", this.video_crf, 0, 3, "",
 				"Quality - the lower the better. 40 - OK");
-		gd.addCheckbox    ("Remove AVI", this.remove_avi,
-				"Remove large AVI files after conversion with ffmpeg.");
+		gd.addCheckbox    ("Remove AVI", this.remove_avi,
+				"Remove large AVI files after (and only) conversion with ffmpeg.");
+		gd.addStringField ("Video encoder for combining", this.video_codec_combo, 60,
+				"FFMPEG video encoder, such as \"VP8\" or \"VP9\". Applies when merging segments.");
+		gd.addNumericField("Video CRF for combining", this.video_crf_combo, 0, 3, "",
+				"Quality - the lower the better. 40 - OK. Applies when merging segments.");
 		gd.addCheckbox    ("Apply unsharp mask to mono", this.um_mono,
 				"Apply unsharp mask to monochrome image sequences/video. Applies to TIFF generatiojn too");
 		gd.addNumericField("Unsharp mask sigma (radius)", this.um_sigma, 5, 7, "pix",

@@ -482,6 +530,18 @@ public class IntersceneMatchParameters {
 				"Monochrome full range to convert to 0..255.");
+		gd.addCheckbox    ("Generate anaglyph stereo", this.anaglyth_en,
+				"Apply unsharp mask to monochrome image sequences/video. Applies to TIFF generatiojn too");
+		{
+			String scolor = String.format("%08x", getLongColor(this.anaglyph_left));
+			gd.addStringField ("Anaglyph color left", scolor, 8,
+					"Any invalid hex number sets default red");
+		}
+		{
+			String scolor = String.format("%08x", getLongColor(this.anaglyph_right));
+			gd.addStringField ("Anaglyph color right", scolor, 8,
+					"Any invalid hex number sets default cyan");
+		}
 		gd.addCheckbox    ("Timestamp color videos", this.annotate_color,
 				"Annotate pseudo-color video frames with timestamps.");

@@ -490,6 +550,7 @@ public class IntersceneMatchParameters {
 		String scolor = (this.annotate_color_color == null)? "none" : String.format("%08x", getLongColor(this.annotate_color_color));
 		gd.addStringField ("Timestamp color for pseudocolor frames", scolor, 8,
 				"Any invalid hex number disables annotation");
 		scolor = (this.annotate_color_mono == null)? "none" : String.format("%08x", getLongColor(this.annotate_color_mono));
 		gd.addStringField ("Timestamp color for monochrome frames", scolor, 8,
 				"Any invalid hex number disables annotation");
 		gd.addCheckbox    ("Transparent timestamp background (monochrome)", this.annotate_transparent_mono,
 				"Put monochrome timestamp over image (unchecked - use black background). Color - always black.");

@@ -644,9 +705,14 @@ public class IntersceneMatchParameters {
 		this.generate_fg =                gd.getNextBoolean();
 		this.generate_bg =                gd.getNextBoolean();
 		this.generate_stereo =            gd.getNextBoolean();
+/*
 		for (int i = 0; i < stereo_bases.length; i++) {
 			this.generate_stereo_var[i] = gd.getNextBoolean();
 		}
+*/
+		for (int i = 0; i < stereo_views.length; i++) {
+			this.generate_stereo_var[i] = gd.getNextBoolean();
+		}
 		this.export_images =              gd.getNextBoolean();
 		this.show_images =                gd.getNextBoolean();

@@ -701,6 +767,7 @@ public class IntersceneMatchParameters {
 		this.strong_strength =            gd.getNextNumber();
 		this.weak_strength =              gd.getNextNumber();
+/*
 		if (stereo_bases.length > 0) {
 			int i =  gd.getNextChoiceIndex();
 			if (i > 0) {

@@ -717,6 +784,15 @@ public class IntersceneMatchParameters {
 			}
 		}
+*/
+		if (stereo_views.length > 0) {
+			int i =  gd.getNextChoiceIndex();
+			if (i > 0) {
+				removeStereoView(i - 1);
+			}
+		}
+		String s = gd.getNextString();
+		addStereoView(s, true);
 //		this.stereo_baseline =            gd.getNextNumber();
 		this.stereo_merge =               gd.getNextBoolean();

@@ -739,14 +815,39 @@ public class IntersceneMatchParameters {
 		this.run_ffmpeg =                 gd.getNextBoolean();
 		this.video_ext =                  gd.getNextString();
 		this.video_codec =                gd.getNextString();
 //		this.video_extra =                gd.getNextString();
 		this.video_crf =            (int) gd.getNextNumber();
 		this.remove_avi =                 gd.getNextBoolean();
+		this.video_codec_combo =          gd.getNextString();
+		this.video_crf_combo =      (int) gd.getNextNumber();
 		this.um_mono =                    gd.getNextBoolean();
 		this.um_sigma =                   gd.getNextNumber();
 		this.um_weight =                  gd.getNextNumber();
 		this.mono_fixed =                 gd.getNextBoolean();
 		this.mono_range =                 gd.getNextNumber();
+		this.anaglyth_en =                gd.getNextBoolean();
+		{
+			String scolor = gd.getNextString();
+			long lcolor = -1;
+			try {
+				lcolor = Long.parseLong(scolor, 16);
+				this.anaglyph_left = setLongColor(lcolor);
+			} catch(NumberFormatException e){
+				this.anaglyph_left = anaglyph_left_default;
+			}
+		}
+		{
+			String scolor = gd.getNextString();
+			long lcolor = -1;
+			try {
+				lcolor = Long.parseLong(scolor, 16);
+				this.anaglyph_right = setLongColor(lcolor);
+			} catch(NumberFormatException e){
+				this.anaglyph_right = anaglyph_right_default;
+			}
+		}
 		this.annotate_color =             gd.getNextBoolean();
 		this.annotate_mono =              gd.getNextBoolean();
 		{

@@ -855,12 +956,20 @@ public class IntersceneMatchParameters {
 		properties.setProperty(prefix+"generate_fg",          this.generate_fg + "");     // boolean
 		properties.setProperty(prefix+"generate_bg",          this.generate_bg + "");     // boolean
 		properties.setProperty(prefix+"generate_stereo",      this.generate_stereo + ""); // boolean
+/*
 		properties.setProperty(prefix+"stereo_bases_num",     this.stereo_bases.length + ""); // int
 		for (int i = 0; i < this.stereo_bases.length; i++) {
 			properties.setProperty(prefix+"stereo_bases_"+i,        this.stereo_bases[i] + "");        // double
 			properties.setProperty(prefix+"generate_stereo_var_"+i, this.generate_stereo_var[i] + ""); // boolean
 		}
+*/
+		properties.setProperty(prefix+"stereo_views_num",     this.stereo_views.length + ""); // int
+		for (int i = 0; i < this.stereo_views.length; i++) {
+			properties.setProperty(prefix+"stereo_views_"+i,        doublesToString(this.stereo_views[i])); // String
+			properties.setProperty(prefix+"generate_stereo_var_"+i, this.generate_stereo_var[i] + "");      // boolean
+		}
 		properties.setProperty(prefix+"export_images",        this.export_images + "");   // boolean
 		properties.setProperty(prefix+"show_images",          this.show_images + "");     // boolean

@@ -930,14 +1039,24 @@ public class IntersceneMatchParameters {
 		properties.setProperty(prefix+"run_ffmpeg",           this.run_ffmpeg + "");      // boolean
 		properties.setProperty(prefix+"video_ext",            this.video_ext + "");       // String
 		properties.setProperty(prefix+"video_codec",          this.video_codec + "");     // String
 //		properties.setProperty(prefix+"video_extra",          this.video_extra + "");     // String
 		properties.setProperty(prefix+"video_crf",            this.video_crf + "");       // int
 		properties.setProperty(prefix+"remove_avi",           this.remove_avi + "");      // boolean
+		properties.setProperty(prefix+"video_codec_combo",    this.video_codec_combo + ""); // String
+		properties.setProperty(prefix+"video_crf_combo",      this.video_crf_combo + "");   // int
 		properties.setProperty(prefix+"um_mono",              this.um_mono + "");         // boolean
 		properties.setProperty(prefix+"um_sigma",             this.um_sigma + "");        // double
 		properties.setProperty(prefix+"um_weight",            this.um_weight + "");       // double
 		properties.setProperty(prefix+"mono_fixed",           this.mono_fixed + "");      // boolean
 		properties.setProperty(prefix+"mono_range",           this.mono_range + "");      // double
+		properties.setProperty(prefix+"anaglyth_en",          this.anaglyth_en + "");     // boolean
+		properties.setProperty(prefix+"anaglyph_left",        getLongColor(anaglyph_left) + "");  // Color
+		properties.setProperty(prefix+"anaglyph_right",       getLongColor(anaglyph_right) + ""); // Color
 		properties.setProperty(prefix+"annotate_color",       this.annotate_color + "");  // boolean
 		properties.setProperty(prefix+"annotate_mono",        this.annotate_mono + "");   // boolean
 		{

@@ -1031,6 +1150,7 @@ public class IntersceneMatchParameters {
 		if (properties.getProperty(prefix+"generate_bg")!=null)       this.generate_bg = Boolean.parseBoolean(properties.getProperty(prefix+"generate_bg"));
 		if (properties.getProperty(prefix+"generate_stereo")!=null)   this.generate_stereo = Boolean.parseBoolean(properties.getProperty(prefix+"generate_stereo"));
+/*
 		if (properties.getProperty(prefix+"stereo_bases_num")!=null) {
 			int stereo_bases_num=Integer.parseInt(properties.getProperty(prefix+"stereo_bases_num"));
 			this.stereo_bases = new double[stereo_bases_num];

@@ -1041,6 +1161,24 @@ public class IntersceneMatchParameters {
 			}
 			orderStereo();
 		}
+*/
+		if (properties.getProperty(prefix+"stereo_views_num")!=null) {
+			int stereo_views_num=Integer.parseInt(properties.getProperty(prefix+"stereo_views_num"));
+			this.stereo_views =        new double  [stereo_views_num][];
+			this.generate_stereo_var = new boolean [stereo_views_num];
+			for (int i = 0; i < stereo_views_num; i++) {
+				if (properties.getProperty(prefix+"stereo_views_"+i)!=null) {
+					this.stereo_views[i]= StringToDoubles(properties.getProperty(prefix+"stereo_views_"+i), 3);
+				}
+				if (properties.getProperty(prefix+"generate_stereo_var_"+i)!=null) {
+					this.generate_stereo_var[i]= Boolean.parseBoolean(properties.getProperty(prefix+"generate_stereo_var_"+i));
+				}
+			}
+			orderStereoViews();
+		}
 		if (properties.getProperty(prefix+"export_images")!=null)     this.export_images = Boolean.parseBoolean(properties.getProperty(prefix+"export_images"));
 		if (properties.getProperty(prefix+"show_images")!=null)       this.show_images = Boolean.parseBoolean(properties.getProperty(prefix+"show_images"));

@@ -1109,14 +1247,32 @@ public class IntersceneMatchParameters {
 		if (properties.getProperty(prefix+"run_ffmpeg")!=null)        this.run_ffmpeg = Boolean.parseBoolean(properties.getProperty(prefix+"run_ffmpeg"));
 		if (properties.getProperty(prefix+"video_ext")!=null)         this.video_ext = (String) properties.getProperty(prefix+"video_ext");
 		if (properties.getProperty(prefix+"video_codec")!=null)       this.video_codec = (String) properties.getProperty(prefix+"video_codec");
 ///		if (properties.getProperty(prefix+"video_extra")!=null)       this.video_extra = (String) properties.getProperty(prefix+"video_extra");
 		if (properties.getProperty(prefix+"video_crf")!=null)         this.video_crf = Integer.parseInt(properties.getProperty(prefix+"video_crf"));
 		if (properties.getProperty(prefix+"remove_avi")!=null)        this.remove_avi = Boolean.parseBoolean(properties.getProperty(prefix+"remove_avi"));
+		if (properties.getProperty(prefix+"video_codec_combo")!=null) this.video_codec_combo = (String) properties.getProperty(prefix+"video_codec_combo");
+		if (properties.getProperty(prefix+"video_crf_combo")!=null)   this.video_crf_combo = Integer.parseInt(properties.getProperty(prefix+"video_crf_combo"));
 		if (properties.getProperty(prefix+"um_mono")!=null)           this.um_mono = Boolean.parseBoolean(properties.getProperty(prefix+"um_mono"));
 		if (properties.getProperty(prefix+"um_sigma")!=null)          this.um_sigma = Double.parseDouble(properties.getProperty(prefix+"um_sigma"));
 		if (properties.getProperty(prefix+"um_weight")!=null)         this.um_weight = Double.parseDouble(properties.getProperty(prefix+"um_weight"));
 		if (properties.getProperty(prefix+"mono_fixed")!=null)        this.mono_fixed = Boolean.parseBoolean(properties.getProperty(prefix+"mono_fixed"));
 		if (properties.getProperty(prefix+"mono_range")!=null)        this.mono_range = Double.parseDouble(properties.getProperty(prefix+"mono_range"));
+		if (properties.getProperty(prefix+"anaglyth_en")!=null)       this.anaglyth_en = Boolean.parseBoolean(properties.getProperty(prefix+"anaglyth_en"));
+		if (properties.getProperty(prefix+"anaglyph_left") != null) {
+			try {
+				this.anaglyph_left = setLongColor(Long.parseLong(properties.getProperty(prefix+"anaglyph_left")));
+			} catch(NumberFormatException e){
+				this.anaglyph_left = anaglyph_left_default;
+			}
+		}
+		if (properties.getProperty(prefix+"anaglyph_right") != null) {
+			try {
+				this.anaglyph_right = setLongColor(Long.parseLong(properties.getProperty(prefix+"anaglyph_right")));
+			} catch(NumberFormatException e){
+				this.anaglyph_right = anaglyph_right_default;
+			}
+		}
 		if (properties.getProperty(prefix+"annotate_color")!=null)    this.annotate_color = Boolean.parseBoolean(properties.getProperty(prefix+"annotate_color"));
 		if (properties.getProperty(prefix+"annotate_mono")!=null)     this.annotate_mono = Boolean.parseBoolean(properties.getProperty(prefix+"annotate_mono"));

@@ -1216,7 +1372,11 @@ public class IntersceneMatchParameters {
 		imp.generate_stereo =     this.generate_stereo;
-		imp.stereo_bases =        this.stereo_bases.clone();
+//		imp.stereo_bases =        this.stereo_bases.clone();
+		imp.stereo_views =        this.stereo_views.clone();
+		for (int i = 0; i < this.stereo_views.length; i++) {
+			imp.stereo_views[i] = this.stereo_views[i].clone();
+		}
 		imp.generate_stereo_var = this.generate_stereo_var.clone();
 		imp.export_images =       this.export_images;

@@ -1286,9 +1446,10 @@ public class IntersceneMatchParameters {
 		imp.run_ffmpeg =          this.run_ffmpeg;
 		imp.video_ext =           this.video_ext;
 		imp.video_codec =         this.video_codec;
 //		imp.video_extra =         this.video_extra;
 		imp.video_crf =           this.video_crf;
 		imp.remove_avi =          this.remove_avi;
+		imp.video_codec_combo =   this.video_codec_combo;
+		imp.video_crf_combo =     this.video_crf_combo;
 		imp.um_mono =             this.um_mono;
 		imp.um_sigma =            this.um_sigma;

@@ -1296,6 +1457,10 @@ public class IntersceneMatchParameters {
 		imp.mono_fixed =          this.mono_fixed;
 		imp.mono_range =          this.mono_range;
+		imp.anaglyth_en =         this.anaglyth_en;
+		imp.anaglyph_left =       this.anaglyph_left;
+		imp.anaglyph_right =      this.anaglyph_right;
 		imp.annotate_color =      this.annotate_color;
 		imp.annotate_mono =       this.annotate_mono;
 		imp.annotate_color_color = this.annotate_color_color;

@@ -1372,6 +1537,7 @@ public class IntersceneMatchParameters {
 		}
 	}
+/*
 	public void orderStereo(){
 		boolean ordered;
 		do {

@@ -1390,7 +1556,6 @@ public class IntersceneMatchParameters {
 		} while (!ordered);
 	}
 	public void addStereo(double base, boolean en) {
 		double [] bases = new double [stereo_bases.length + 1];
 		boolean [] ens =  new boolean [stereo_bases.length + 1];

@@ -1402,7 +1567,6 @@ public class IntersceneMatchParameters {
 		generate_stereo_var = ens;
 		orderStereo();
 	}
 	public void removeStereo(int indx) {
 		if ((indx >=0) && (indx <stereo_bases.length)) {
 			double [] bases = new double [stereo_bases.length - 1];

@@ -1421,5 +1585,126 @@ public class IntersceneMatchParameters {
 	}
+*/
+	public void orderStereoViews(){
+		boolean ordered;
+		do {
+			ordered=true;
+			for (int i = 0; i < (stereo_views.length - 1); i++) {
+				if (stereo_views[i+1][0] > stereo_views[i][0]) {
+					continue;
+				}
+				if ((stereo_views[i+1][0] == stereo_views[i][0]) &&
+						(stereo_views[i+1][1] > stereo_views[i][1])) {
+					continue;
+				}
+				if ((stereo_views[i+1][0] == stereo_views[i][0]) &&
+						(stereo_views[i+1][1] == stereo_views[i][1]) &&
+						(stereo_views[i+1][2] > stereo_views[i][2])) {
+					continue;
+				}
+				if ((stereo_views[i+1][0] == stereo_views[i][0]) &&
+						(stereo_views[i+1][1] == stereo_views[i][1]) &&
+						(stereo_views[i+1][2] == stereo_views[i][2])) {
+					// all same values - remove extra
+					generate_stereo_var[i] |= generate_stereo_var[i+1];
+					for (int j = i+1; j < (stereo_views.length - 1); j++) {
+						generate_stereo_var[j] = generate_stereo_var[j+1];
+						stereo_views[j] =        stereo_views[j+1];
+					}
+					ordered = false;
+					break; // next while
+				}
+				boolean en = generate_stereo_var[i+1];
+				generate_stereo_var[i+1] = generate_stereo_var[i];
+				generate_stereo_var[i] =   en;
+				double [] view =    stereo_views[i+1];
+				stereo_views[i+1] = stereo_views[i];
+				stereo_views[i] =   view;
+				ordered = false;
+			}
+		} while (!ordered);
+		return;
+	}
+	public void addStereoView(String stereo_view_string, boolean en) {
+		double [] stereo_view = StringToDoubles(stereo_view_string, 3);
+		if (stereo_view != null) {
+			addStereoView(stereo_view, en);
+		}
+	}
+	public void addStereoView(double [] stereo_view, boolean en) {
+		double [][] views = new double  [stereo_views.length + 1][];
+		boolean []  ens =   new boolean [stereo_views.length + 1];
+		views[0] = stereo_view;
+		ens[0] =   en;
+		System.arraycopy(stereo_views,        0, views, 1, stereo_views.length);
+		System.arraycopy(generate_stereo_var, 0, ens,   1, stereo_views.length);
+		stereo_views =        views;
+		generate_stereo_var = ens;
+		orderStereoViews();
+	}
+	public void removeStereoView(int indx) {
+		if ((indx >= 0) && (indx < stereo_views.length)) {
+			double [][] views = new double  [stereo_views.length - 1][];
+			boolean []  ens =   new boolean [stereo_views.length - 1];
+			if (indx > 0) {
+				System.arraycopy(stereo_views,        0, views, 0, indx);
+				System.arraycopy(generate_stereo_var, 0, ens,   0, indx);
+			}
+			if (indx < (stereo_views.length - 1)) {
+				System.arraycopy(stereo_views,        indx + 1, views, indx, stereo_views.length - indx - 1);
+				System.arraycopy(generate_stereo_var, indx + 1, ens,   indx, stereo_views.length - indx - 1);
+			}
+			stereo_views =        views;
+			generate_stereo_var = ens;
+		}
+	}
+	public static String doublesToString(double [] data) {
+		return doublesToString(data, null);
+	}
+	public static String doublesToString(double [] data, String fmt) {
+		if ((fmt == null) || (fmt.trim().length() == 0)) {
+			fmt = "%.0f";
+		}
+		String s = "";
+		for (int i = 0; i < data.length; i++) {
+			s += String.format(fmt, data[i]);
+			if (i < (data.length - 1)) {
+				s += ", ";
+			}
+		}
+		return s;
+	}
+	public static double [] StringToDoubles(String s, int len) {
+		StringTokenizer st = new StringTokenizer(s, " \t\n\r\f,");
+		if (st.countTokens() == 0) {
+			return null;
+		}
+		if (len <= 0) {
+			len = st.countTokens();
+		}
+		double [] data = new double [len];
+		int i = 0;
+		while (st.hasMoreTokens() && (i < len)) {
+			double d = 0;
+			try {
+				d = Double.parseDouble(st.nextToken());
+			} catch(NumberFormatException e){
+				d = 0;
+			}
+			data[i++] = d;
+		}
+		return data;
+	}
 }
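A minimal standalone sketch of the {base, up, back} round-trip implemented by the doublesToString()/StringToDoubles() helpers added above (views are stored in millimeters as "base, up, back" strings in the properties file). The class name and main() harness below are hypothetical illustrations, not part of the commit; only the format/parse behavior mirrors the new helpers.

    import java.util.StringTokenizer;

    public class StereoViewRoundTrip {
        // Format a view as "base, up, back" with no decimals, like doublesToString(data, "%.0f").
        static String format(double[] view) {
            StringBuilder sb = new StringBuilder();
            for (int i = 0; i < view.length; i++) {
                sb.append(String.format("%.0f", view[i]));
                if (i < view.length - 1) sb.append(", ");
            }
            return sb.toString();
        }

        // Parse up to len doubles, tolerating commas/whitespace; unparsable tokens become 0.
        static double[] parse(String s, int len) {
            StringTokenizer st = new StringTokenizer(s, " \t\n\r\f,");
            if (st.countTokens() == 0) return null;
            if (len <= 0) len = st.countTokens();
            double[] data = new double[len];
            int i = 0;
            while (st.hasMoreTokens() && (i < len)) {
                double d;
                try { d = Double.parseDouble(st.nextToken()); }
                catch (NumberFormatException e) { d = 0; }
                data[i++] = d;
            }
            return data;
        }

        public static void main(String[] args) {
            double[] view = parse("500, 0, 2000", 3); // mm: baseline, above camera, behind camera
            System.out.println(format(view));          // prints "500, 0, 2000"
        }
    }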
src/main/java/com/elphel/imagej/tileprocessor/OpticalFlow.java

@@ -2780,9 +2780,9 @@ public class OpticalFlow {
 				}
 			}
 		}
-		final boolean     ref_is_identity = (scene_xyz[0]==0.0) && (scene_xyz[1]==0.0) && (scene_xyz[1]==0.0) &&
-				(scene_atr[0]==0.0) && (scene_atr[1]==0.0) && (scene_atr[1]==0.0);
+		final boolean     ref_is_identity = false; /// (scene_xyz[0]==0.0) && (scene_xyz[1]==0.0) && (scene_xyz[2]==0.0) &&
+				/// (scene_atr[0]==0.0) && (scene_atr[1]==0.0) && (scene_atr[2]==0.0);
 		final double []   disparity_ref = dref;
 //		final int tilesX_ref = ref_w;
 //		final int tilesY_ref = ref_h;

@@ -3963,7 +3963,7 @@ public class OpticalFlow {
 			boolean             batch_mode,
 			QuadCLT             quadCLT_main, // tiles should be set
 			int                 ref_index,    // -1 - last
-			int                 ref_step,
+			int                 ref_step,     // int start_index,
 			CLTParameters       clt_parameters,
 			EyesisCorrectionParameters.DebayerParameters debayerParameters,
 			ColorProcParameters colorProcParameters,

@@ -3974,6 +3974,7 @@ public class OpticalFlow {
 			boolean             reset_from_extrinsics,
 			String [][]         videos,        // null or String[1][] list of generated avi or webm paths
 			int [][]            stereo_widths, // null or int[1][] matching videos -
+			int []              start_ref_pointers, // [0] - earliest valid scene, [1] ref_index
 			                                   // each element is 0 for non-stereo and full width for stereo
 			final int           threadsMax,    // maximal number of threads to launch
 			final boolean       updateStatus,

@@ -4032,11 +4033,15 @@ public class OpticalFlow {
 				clt_parameters.imp.generate_bg};
 		boolean generate_stereo =        clt_parameters.imp.generate_stereo;
-		double [] stereo_bases =         clt_parameters.imp.stereo_bases; // {0.0, 200.0, 500.0, 1000.0};
+//		double [] stereo_bases =         clt_parameters.imp.stereo_bases; // {0.0, 200.0, 500.0, 1000.0};
+		double [][] stereo_views =       clt_parameters.imp.stereo_views; // {0.0, 200.0, 500.0, 1000.0};
 		boolean [] generate_stereo_var = clt_parameters.imp.generate_stereo_var;
 		boolean stereo_merge =           clt_parameters.imp.stereo_merge;
+		boolean anaglyth_en =            clt_parameters.imp.anaglyth_en;
+		final Color anaglyph_left =      clt_parameters.imp.anaglyph_left;
+		final Color anaglyph_right =     clt_parameters.imp.anaglyph_right;
 		int stereo_gap =                 clt_parameters.imp.stereo_gap;
 //		double stereo_intereye =         clt_parameters.imp.stereo_intereye;
 //		double stereo_phone_width =      clt_parameters.imp.stereo_phone_width;

@@ -4098,11 +4103,21 @@ public class OpticalFlow {
 			System.out.println("buildSeriesTQ(): No files to process (of "+sourceFiles0.length+")");
 			return null;
 		}
+		// set_channels will include all 99 scenes even as quadCLTs.length matches ref_index
 		QuadCLT.SetChannels [] set_channels = quadCLT_main.setChannels(debugLevel);
-		QuadCLT [] quadCLTs = new QuadCLT [set_channels.length];
 		if (ref_index < 0) {
-			ref_index += quadCLTs.length;
+			ref_index += set_channels.length;
 		}
+		if (start_ref_pointers != null) {
+			start_ref_pointers[0] = 0;
+			start_ref_pointers[1] = ref_index;
+		}
+		QuadCLT [] quadCLTs = new QuadCLT [ref_index + 1]; // [set_channels.length];
+		//start_index
 		double [][][] scenes_xyzatr = new double [quadCLTs.length][][]; // previous scene relative to the next one
 		scenes_xyzatr[ref_index] = new double [2][3]; // all zeros
 		// See if build_ref_dsi is needed

@@ -4315,6 +4330,9 @@ public class OpticalFlow {
 		if ((ref_index - earliest_scene + 1) < min_num_scenes) {
 			System.out.println("Total number of useful scenes = "+(ref_index - earliest_scene + 1)+
 					" < "+min_num_scenes+". Scrapping this series.");
+			if (start_ref_pointers != null) {
+				start_ref_pointers[0] = earliest_scene;
+			}
 			return null;
 		}
 		if (earliest_scene > 0) {

@@ -4326,7 +4344,7 @@ public class OpticalFlow {
 			}
 			quadCLTs[ref_index].set_orient(1); // first orientation
 			quadCLTs[ref_index].set_accum(0);  // reset accumulations ("build_interscene") number
-			quadCLTs[ref_index].saveInterProperties( // save properties for interscene processing (extrinsics, ers, ...)
+			quadCLTs[ref_index].saveInterProperties( // save properties for interscene processing (extrinsics, ers, ...) // null pointer
 					null,            // String path, // full name with extension or w/o path to use x3d directory
 					debugLevel + 1);
 		} else {// if (build_orientations) {

@@ -4341,11 +4359,11 @@ public class OpticalFlow {
 				}
 			}
 		}
+		// just in case that orientations were calculated before:
+		earliest_scene = getEarliestScene(quadCLTs);
 		double [][] combo_dsn_final = null;
 		while (!reuse_video && ((quadCLTs[ref_index].getNumOrient() < min_num_orient) ||
 				(quadCLTs[ref_index].getNumAccum() < min_num_interscene))) { // if (build_interscene) {
 			// start with interscene accumulations if number of accumulations is less than number of performed
 			// orientations or no orientations is needed
 			if ((quadCLTs[ref_index].getNumAccum() < min_num_interscene) &&
 					((quadCLTs[ref_index].getNumAccum() < quadCLTs[ref_index].getNumOrient()) ||
 							(quadCLTs[ref_index].getNumOrient() >= min_num_orient))) {

@@ -4389,6 +4407,9 @@ public class OpticalFlow {
 			if ((ref_index - earliest_scene + 1) < min_num_scenes) {
 				System.out.println("After reAdjustPairsLMAInterscene() total number of useful scenes = "+(ref_index - earliest_scene + 1)+
 						" < "+min_num_scenes+". Scrapping this series.");
+				if (start_ref_pointers != null) {
+					start_ref_pointers[0] = earliest_scene;
+				}
 				return null;
 			}
 			if (earliest_scene > 0) {

@@ -4404,21 +4425,6 @@ public class OpticalFlow {
 		if (test_ers) { // only debug feature
 			test_ers0 = quadCLTs.length - 1; // make it always == reference !
 			//Already done in any case
-			/*
-			if (!force_initial_orientations && !build_interscene && !readjust_orient) {
-				for (int nscene = 0; nscene < (quadCLTs.length -1); nscene++) {
-					if ((Math.abs(nscene - test_ers0) <= 1) || (Math.abs(nscene - test_ers1) <= 1)) {
-						quadCLTs[nscene] = (QuadCLT) quadCLT_main.spawnNoModelQuadCLT( // restores image data
-								set_channels[nscene].set_name,
-								clt_parameters,
-								colorProcParameters, //
-								threadsMax,
-								debugLevel-2);
-					}
-				}
-			}
-			*/
 			testERS(
 					clt_parameters, // CLTParameters  clt_parameters,

@@ -4432,22 +4438,9 @@ public class OpticalFlow {
 			return quadCLTs[ref_index].getX3dTopDirectory();
 		}
-		// generates 3-dmodes, colors, stereos, tiffs/videos
+		// generates 3-d modes, colors, stereos, tiffs/videos
 		if (generate_mapped || reuse_video) {
 			// already done in any case
-			/*
-			if (!force_initial_orientations && !build_interscene) {
-				for (int scene_index =  ref_index - 1; scene_index >= 0 ; scene_index--) {
-					quadCLTs[scene_index] = (QuadCLT) quadCLT_main.spawnNoModelQuadCLT( // restores image data
-							set_channels[scene_index].set_name,
-							clt_parameters,
-							colorProcParameters, //
-							threadsMax,
-							debugLevel-2);
-				}
-			}
-			*/
 			int tilesX = quadCLTs[ref_index].getTileProcessor().getTilesX();
 			int tilesY = quadCLTs[ref_index].getTileProcessor().getTilesY();
 			double [] disparity_fg = null;

@@ -4560,11 +4553,18 @@ public class OpticalFlow {
 			}
 			boolean is_3d =      mode3d > 0;
 			boolean gen_stereo = is_3d && generate_stereo;
-			double [] baselines = (gen_stereo)? stereo_bases : new double[] {0.0};
-			for (int ibase = 0; ibase < baselines.length; ibase++) if (!gen_stereo || generate_stereo_var[ibase]) {
-				double stereo_baseline = gen_stereo? stereo_bases[ibase] : 0.0;
+//			double [] baselines = (gen_stereo)? stereo_bases : new double[] {0.0};
+			double [][] views =   (gen_stereo)? stereo_views : new double[][] {{0.0, 0.0, 0.0}};
+//			for (int ibase = 0; ibase < baselines.length; ibase++) if (!gen_stereo || generate_stereo_var[ibase]) {
+			for (int ibase = 0; ibase < views.length; ibase++) if (!gen_stereo || generate_stereo_var[ibase]) {
+//				double stereo_baseline = gen_stereo? stereo_bases[ibase] : 0.0;
+				double stereo_baseline = gen_stereo? views[ibase][0] : 0.0;
 				boolean is_stereo = gen_stereo && stereo_baseline > 0;
 				double stereo_baseline_meters = 0.001 * stereo_baseline;
+				double view_height_meters =     0.001 * views[ibase][1];
+				double view_back_meters =       0.001 * views[ibase][2];
 //				double stereo_back = 3.0; // 0; // -10.0; // meters
 				// col_mode: 0 - mono, 1 - color
 				for (int col_mode = 0; col_mode < 2; col_mode++) if (gen_seq_mono_color[col_mode]){ // skip if not needed
 					double [] selected_disparity = (mode3d > 1)? disparity_bg: ((mode3d > 0)? disparity_fg: disparity_raw);

@@ -4575,18 +4575,25 @@ public class OpticalFlow {
 						scenes_suffix += String.format("-UM%.1f_%.2f", um_sigma, um_weight);
 					}
 					int num_stereo = (is_stereo && (mode3d > 0))? 2 : 1; // only for 3D views
-					boolean combine_left_right = (num_stereo > 1) && stereo_merge;
+					boolean combine_left_right = (num_stereo > 1) && (stereo_merge || (anaglyth_en && !toRGB));
 					ImagePlus [] imp_scenes_pair = new ImagePlus [num_stereo];
 					String scenes_suffix_pair = scenes_suffix;
 					for (int nstereo = 0; nstereo < num_stereo; nstereo++) {
 						double [] xyz_offset = {
 								-stereo_baseline_meters * (nstereo - 0.5) * (num_stereo - 1), // x offset
-								0.0,  // Y offset
-								0.0}; // Z offset
+								-view_height_meters, // Y offset
+								-view_back_meters};  // Z offset
 						if (num_stereo > 1) {
 							scenes_suffix = scenes_suffix_pair + ((nstereo > 0)? "-RIGHT" : "-LEFT"); // check if opposite
-							scenes_suffix += stereo_baseline;
+							scenes_suffix += "-B"+views[ibase][0];
 						}
+						if (views[ibase][1] != 0) {
+							scenes_suffix += "-Y"+views[ibase][1];
+						}
+						if (views[ibase][2] != 0) {
+							scenes_suffix += "-Z"+views[ibase][2];
+						}
 						if (generate_mapped) {
 							imp_scenes_pair[nstereo]= renderSceneSequence(
 									clt_parameters, // CLTParameters clt_parameters,

@@ -4662,8 +4669,12 @@ public class OpticalFlow {
 						if (combine_left_right && (nstereo == 0)) {
 							continue;
 						}
-						if (combine_left_right) { // combine pairs multi-threaded
-							// stack_scenes = new ImageStack(imp_scene.getWidth(),imp_scene.getHeight());
+						// no_combine, stereo_2_images, stereo_anaglyth
+						ImagePlus imp_video = imp_scenes_pair[nstereo];
+						boolean [] combine_modes = {!combine_left_right, stereo_merge && combine_left_right, anaglyth_en && !toRGB && combine_left_right};
+						for (int istereo_mode = 0; istereo_mode < combine_modes.length; istereo_mode++) if (combine_modes[istereo_mode]) {
+//						if (combine_left_right) { // combine pairs multi-threaded
+							if (istereo_mode == 1) { // combine pairs for "Google" VR
 							final int left_width =   imp_scenes_pair[0].getWidth();
 							final int right_width =  imp_scenes_pair[1].getWidth();
 							final int stereo_width = left_width + right_width + stereo_gap;

@@ -4705,11 +4716,73 @@ public class OpticalFlow {
 								}
 								ImageDtt.startAndJoin(threads);
 							}
-							// convert stereo_stack to imp_scenes_pair[1], keeping calibration and fps?
-							imp_scenes_pair[1].setStack(stereo_stack);
-							String title = imp_scenes_pair[1].getTitle();
-							imp_scenes_pair[1].setTitle(title.replace("-RIGHT","-STEREO"));
+							imp_video = new ImagePlus();
+							imp_video.setImage(imp_scenes_pair[1]); // copy many attributes
+							imp_video.setStack(stereo_stack);
+							String title = imp_scenes_pair[1].getTitle();
+							imp_video.setTitle(title.replace("-RIGHT","-STEREO"));
+							// convert stereo_stack to imp_scenes_pair[1], keeping calibration and fps?
+///							imp_scenes_pair[1].setStack(stereo_stack);
+///							String title = imp_scenes_pair[1].getTitle();
+///							imp_video = new ImagePlus(
+///									imp_scenes_pair[1].getTitle().replace("-RIGHT","-STEREO"),
+///									stereo_stack);
+///							imp_scenes_pair[1].setTitle(title.replace("-RIGHT","-STEREO"));
+						} else if (istereo_mode == 2) { // combine anaglyph
+//							final Color anaglyph_left =  clt_parameters.imp.anaglyph_left;
+//							final Color anaglyph_right = clt_parameters.imp.anaglyph_right;
+							final double [] left_rgb =  {anaglyph_left.getRed()/255.0,  anaglyph_left.getGreen()/255.0,  anaglyph_left.getBlue()/255.0};
+							final double [] right_rgb = {anaglyph_right.getRed()/255.0, anaglyph_right.getGreen()/255.0, anaglyph_right.getBlue()/255.0};
+							final int left_width =  imp_scenes_pair[0].getWidth();
+							final int left_height = imp_scenes_pair[0].getHeight();
+							final int nSlices =     imp_scenes_pair[0].getStack().getSize();
+							final ImageStack stereo_stack = new ImageStack(left_width, left_height);
+							for (int i = 0; i < nSlices; i++) {
+								stereo_stack.addSlice(
+										imp_scenes_pair[0].getStack().getSliceLabel(i + 1),
+										new int [left_width * left_height]);
+							}
+							if (generate_mapped) {
+								final Thread[] threads = ImageDtt.newThreadArray(QuadCLT.THREADS_MAX);
+								final AtomicInteger ai = new AtomicInteger(0);
+								for (int ithread = 0; ithread < threads.length; ithread++) {
+									threads[ithread] = new Thread() {
+										public void run() {
+											int [] rgb = new int [3];
+											for (int nSlice = ai.getAndIncrement(); nSlice < nSlices; nSlice = ai.getAndIncrement()) {
+												int [] pixels_stereo = (int []) stereo_stack.getPixels(nSlice + 1);
+												int [] pixels_left =   (int []) imp_scenes_pair[0].getStack().getPixels(nSlice + 1);
+												int [] pixels_right =  (int []) imp_scenes_pair[1].getStack().getPixels(nSlice + 1);
+												for (int pix = 0; pix < pixels_left.length; pix++) {
+													int gl = ((pixels_left[pix]  & 0xff00) >> 8);
+													int gr = ((pixels_right[pix] & 0xff00) >> 8);
+													rgb[0] = ((int) Math.min(gl*left_rgb[0] + gr*right_rgb[0], 255)) & 0xff;
+													rgb[1] = ((int) Math.min(gl*left_rgb[1] + gr*right_rgb[1], 255)) & 0xff;
+													rgb[2] = ((int) Math.min(gl*left_rgb[2] + gr*right_rgb[2], 255)) & 0xff;
+													pixels_stereo[pix] = 0xff000000 + (rgb[0] << 16) + (rgb[1] << 8) + rgb[2];
+												}
+											}
+										}
+									};
+								}
+								ImageDtt.startAndJoin(threads);
+							}
+							imp_video = new ImagePlus();
+							imp_video.setImage(imp_scenes_pair[1]); // copy many attributes
+							imp_video.setStack(stereo_stack);
+							String title = imp_scenes_pair[1].getTitle();
+							imp_video.setTitle(title.replace("-RIGHT","-ANAGLYPH"));
+///							String title = imp_scenes_pair[1].getTitle();
+///							imp_scenes_pair[1].setTitle(title.replace("-RIGHT","-ANAGLYPH"));
+						} // if (istereo_mode == 1) {if (combine_left_right) { // combine pairs multi-threaded
 						String avi_path = null;
 						video: {

@@ -4720,7 +4793,7 @@ public class OpticalFlow {
 									mode_avi,         // int    avi_mode,
 									avi_JPEG_quality, // int    avi_JPEG_quality,
 									video_fps,        // double fps,
-									imp_scenes_pair[nstereo]); // ImagePlus imp)
+									imp_video);       // imp_scenes_pair[nstereo]); // ImagePlus imp)
 							} catch (IOException e) {
 								// TODO Auto-generated catch block
 								e.printStackTrace();

@@ -4731,7 +4804,8 @@ public class OpticalFlow {
 							if (avi_path == null) {
 								break video;
 							}
-							int img_width = imp_scenes_pair[nstereo].getWidth();
+//							int img_width = imp_scenes_pair[nstereo].getWidth();
+							int img_width = imp_video.getWidth();
 							int stereo_width = combine_left_right? img_width : 0;
 							stereo_widths_list.add(stereo_width);
 							if (!run_ffmpeg) {

@@ -4772,7 +4846,9 @@ public class OpticalFlow {
 								System.out.println("Deleted AVI video file: \""+avi_path+"\"");
 							}
 						}
 					}
+					} // for (int istereo_mode = 0; istereo_mode < stereo_modes.length; istereo_mode++) if(combine_modes[istereo_mode]) {
 					} // if (gen_avi_mono_color[col_mode])
 					if (show_mono_color[col_mode] && generate_mapped) {
 						imp_scenes_pair[nstereo].show();
 					}

@@ -4874,18 +4950,41 @@ public class OpticalFlow {
 					imp_constant_mono.show();
 				}
 			}
+			boolean offset_fg_image = true; // config later, generate FG image for all stereo views
+			double [][] img_views = offset_fg_image ? stereo_views : (new double[][] {{0, 0, 0}});
+			for (int ibase = 0; ibase < img_views.length; ibase++) if (!offset_fg_image || generate_stereo_var[ibase]) {
+				double stereo_baseline_meters = 0.001 * img_views[ibase][0];
+				double view_height_meters =     0.001 * img_views[ibase][1];
+				double view_back_meters =       0.001 * img_views[ibase][2];
+				double [] xyz_offset = {
+						-stereo_baseline_meters, // x offset
+						-view_height_meters,     // Y offset
+						-view_back_meters};      // Z offset
+				String scenes_suffix = "";
+				if (img_views[ibase][0] != 0) {
+					scenes_suffix += "-B"+img_views[ibase][0];
+				}
+				if (img_views[ibase][1] != 0) {
+					scenes_suffix += "-Y"+img_views[ibase][1];
+				}
+				if (img_views[ibase][2] != 0) {
+					scenes_suffix += "-Z"+img_views[ibase][2];
+				}
 			ImagePlus imp_fg = QuadCLT.renderGPUFromDSI(
 					-1,                  // final int       sensor_mask,
 					false,               // final boolean   merge_channels,
 					null,                // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
 					clt_parameters,      // CLTParameters   clt_parameters,
 					fg_disparity,        // double []       disparity_ref,
-					ZERO3,               // final double [] scene_xyz, // camera center in world coordinates
+					xyz_offset,          // ZERO3, // final double [] scene_xyz, // camera center in world coordinates
 					ZERO3,               // final double [] scene_atr, // camera orientation relative to world frame
 					quadCLTs[ref_index], // final QuadCLT   scene,
 					quadCLTs[ref_index], // final QuadCLT   ref_scene, // now - may be null - for testing if scene is rotated ref
 					true,                // toRGB, // final boolean toRGB,
-					"GPU-SHIFTED-FOREGROUND", // String suffix,
+					scenes_suffix+"GPU-SHIFTED-FOREGROUND", // String suffix,
 					threadsMax,          // int threadsMax,
 					debugLevel);         // int debugLevel)
 			quadCLTs[ref_index].saveImagePlusInModelDirectory(

@@ -4897,12 +4996,12 @@ public class OpticalFlow {
 					null,                // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
 					clt_parameters,      // CLTParameters   clt_parameters,
 					fg_disparity,        // double []       disparity_ref,
-					ZERO3,               // final double [] scene_xyz, // camera center in world coordinates
+					xyz_offset,          // ZERO3, // final double [] scene_xyz, // camera center in world coordinates
 					ZERO3,               // final double [] scene_atr, // camera orientation relative to world frame
 					quadCLTs[ref_index], // final QuadCLT   scene,
 					quadCLTs[ref_index], // final QuadCLT   ref_scene, // now - may be null - for testing if scene is rotated ref
 					false,               // toRGB, // final boolean toRGB,
-					"GPU-SHIFTED-FOREGROUND", // String suffix,
+					scenes_suffix+"GPU-SHIFTED-FOREGROUND", // String suffix,
 					threadsMax,          // int threadsMax,
 					debugLevel);         // int debugLevel)
 			quadCLTs[ref_index].saveImagePlusInModelDirectory(

@@ -4914,6 +5013,9 @@ public class OpticalFlow {
 					imp_fg_mono.show();
 				}
 			}
+			}
 			ImagePlus imp_bg = QuadCLT.renderGPUFromDSI(
 					-1,                  // final int       sensor_mask,
 					false,               // final boolean   merge_channels,

@@ -5019,7 +5121,6 @@ public class OpticalFlow {
 					combo_dsn_final,     // double [][] combo_dsn_final, // dls,
 					quadCLTs[ref_index], // QuadCLT scene,
 					debugLevel);         // int debugLevel);// > 0
 			// FIXME:  Adjust for incomplete series!!!!!
 			intersceneMlExport(
 					clt_parameters,      // CLTParameters clt_parameters,
 					ers_reference,       // ErsCorrection ers_reference,

@@ -5030,8 +5131,6 @@ public class OpticalFlow {
 		}
 //		ArrayList<String> video_list = new ArrayList<String>();
 		if (videos != null) {
 			videos[0] = video_list.toArray(new String[0]);
 		}

@@ -5041,21 +5140,20 @@ public class OpticalFlow {
 				stereo_widths[0][i] = stereo_widths_list.get(i);
 			}
 		}
+		if (start_ref_pointers != null) {
+			start_ref_pointers[0] = earliest_scene;
+		}
 		System.out.println("buildSeries(): DONE"); //
 		return quadCLTs[ref_index].getX3dTopDirectory();
-		// return true;
 	}

 	public void testERS(
 			CLTParameters clt_parameters,
 			int           indx0, // reference scene in a pair
 			int           indx1, // other scene in a pair
 //			double []     ref_disparity,
 			QuadCLT []    quadCLTs,
 			int           debugLevel) {
 		// First create a pair of images, similar to renderSceneSequence()
 //		boolean toRGB =         true;
 		boolean show_color =    clt_parameters.imp.show_mapped_color;
 		boolean show_mono =     clt_parameters.imp.show_mapped_mono;
 		boolean use_combo_dsi = clt_parameters.imp.use_combo_dsi;

@@ -5083,7 +5181,6 @@ public class OpticalFlow {
 				quadCLTs[ref_index], // QuadCLT scene,
 				debugLevel);         // int debugLevel);// > 0
 		double [] disparity_fg = ds[0]; // combo_dsn_final[COMBO_DSN_INDX_DISP_FG];
-//		double d
 		double [] interscene_ref_disparity = null; // keep null to use old single-scene disparity for interscene matching
 		if (use_combo_dsi) {
 			interscene_ref_disparity = ds[0].clone(); // use_lma_dsi ?

@@ -5096,7 +5193,6 @@ public class OpticalFlow {
 			}
 		}
-//		QuadCLT [] other_ref = {quadCLTs[indx1],quadCLTs[indx0]};
 		int [] other_ref = {indx1, indx0};
 		ErsCorrection ers_reference = quadCLTs[ref_index].getErsCorrection();
 		ImageStack stack_scenes_color = null;

@@ -5428,14 +5524,23 @@ public class OpticalFlow {
 			String ts = quadCLTs[nscene].getImageName();
 			double [] scene_xyz = ZERO3;
 			double [] scene_atr = ZERO3;
-			if ((nscene != ref_index) && (mode3d >= 0)) {
+//			if ((nscene != ref_index) && (mode3d >= 0)) {
+			if (nscene != ref_index) { // Check even for raw, so video frames will match in all modes
 				scene_xyz = ers_reference.getSceneXYZ(ts);
 				scene_atr = ers_reference.getSceneATR(ts);
+				if ((scene_atr == null) || (scene_xyz == null)) {
+					continue;
+				}
+				if (mode3d >= 0) {
 				double [] scene_ers_xyz_dt = ers_reference.getSceneErsXYZ_dt(ts);
 				double [] scene_ers_atr_dt = ers_reference.getSceneErsATR_dt(ts);
 				quadCLTs[nscene].getErsCorrection().setErsDt(
 						scene_ers_xyz_dt,  // double [] ers_xyz_dt,
 						scene_ers_atr_dt); // double [] ers_atr_dt)(ers_scene_original_xyz_dt);
+				} else { // ugly, restore for raw mode that should not be rotated/shifted
+					scene_xyz = ZERO3;
+					scene_atr = ZERO3;
+				}
 			}
 			if (stereo_xyz != null) { // offset all, including reference scene
 				double [][] combo_xyzatr = ErsCorrection.combineXYZATR(

@@ -9533,6 +9638,21 @@ public double[][] correlateIntersceneDebug( // only uses GPU and quad
 		return disparity_map; // disparity_map
 	}

+	public static int getEarliestScene(QuadCLT [] scenes) {
+		int ref_index = scenes.length - 1;
+		ErsCorrection ers_reference = scenes[ref_index].getErsCorrection();
+		for (int nscene = ref_index - 1; nscene >= 00; nscene--) {
+			String ts = scenes[nscene].getImageName();
+			double [] scene_xyz = ers_reference.getSceneXYZ(ts);
+			double [] scene_atr = ers_reference.getSceneATR(ts);
+			if ((scene_xyz == null) || (scene_atr == null)){
+				return nscene + 1; // scene is not matched
+			}
+		}
+		return 0;
+	}
+
 	// Cleaned up and optimized version to reduce memory usage (on-the-fly integration, not saving full correlation data)
 	public double[][] correlateInterscene(
 			final CLTParameters clt_parameters,
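A minimal sketch of the per-pixel anaglyph blend performed by the new istereo_mode == 2 branch above: the green channel of the (monochrome) left and right frames is weighted by the configured left/right colors, summed, clamped to 255 and packed into an opaque ARGB int. The class and main() below are a hypothetical, single-pixel illustration; the commit applies the same arithmetic inside threaded per-slice loops over ImageStack pixel arrays.

    import java.awt.Color;

    public class AnaglyphBlend {
        static int blend(int leftPixel, int rightPixel, Color leftColor, Color rightColor) {
            int gl = (leftPixel  & 0xff00) >> 8;   // brightness of the left view (green channel)
            int gr = (rightPixel & 0xff00) >> 8;   // brightness of the right view
            double[] lw = {leftColor.getRed()/255.0,  leftColor.getGreen()/255.0,  leftColor.getBlue()/255.0};
            double[] rw = {rightColor.getRed()/255.0, rightColor.getGreen()/255.0, rightColor.getBlue()/255.0};
            int r = ((int) Math.min(gl*lw[0] + gr*rw[0], 255)) & 0xff;
            int g = ((int) Math.min(gl*lw[1] + gr*rw[1], 255)) & 0xff;
            int b = ((int) Math.min(gl*lw[2] + gr*rw[2], 255)) & 0xff;
            return 0xff000000 + (r << 16) + (g << 8) + b;   // opaque ARGB
        }

        public static void main(String[] args) {
            int left  = 0xff808080;   // mid-grey pixel in the left frame
            int right = 0xff404040;   // darker pixel in the right frame
            int out = blend(left, right, new Color(255, 0, 0), new Color(0, 255, 255)); // red/cyan defaults
            System.out.printf("0x%08x%n", out);   // 0xff804040: red from left, green/blue from right
        }
    }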
src/main/java/com/elphel/imagej/tileprocessor/QuadCLT.java

@@ -613,7 +613,7 @@ public class QuadCLT extends QuadCLTCPU {
 				break;
 			}
 			fill_all[0] = anum_gaps.get() == 0; // no new tiles filled
-			if (npass == (num_passes - 1)){
+			if ((debug_level > 0) && (npass == (num_passes - 1))){
 				System.out.println("fillDisparityStrength() LAST PASS ! npass="+npass+", change="+Math.sqrt(amax_diff.get())+" ("+max_change+")");

@@ -2198,6 +2198,27 @@ public class QuadCLT extends QuadCLTCPU {
 		if (full_woi_in != null) {
 			rendered_width = full_woi_in.width * GPUTileProcessor.DTT_SIZE;
 		}
+		boolean showPxPyD = false;
+		if (showPxPyD) {
+			int dbg_width =  rendered_width/GPUTileProcessor.DTT_SIZE;
+			int dbg_height = pXpYD.length/dbg_width;
+			double [][] dbg_img = new double [3][pXpYD.length];
+			for (int i = 0; i < dbg_img.length; i++) {
+				Arrays.fill(dbg_img[i], Double.NaN);
+			}
+			for (int nTile = 0; nTile < pXpYD.length; nTile++) if (pXpYD[nTile] != null){
+				for (int i = 0; i < dbg_img.length; i++) {
+					dbg_img[i][nTile] = pXpYD[nTile][i];
+				}
+			}
+			(new ShowDoubleFloatArrays()).showArrays( // out of boundary 15
+					dbg_img,
+					dbg_width,
+					dbg_height,
+					true,
+					"pXpYD",
+					new String[] {"pX","pY","Disparity"});
+		}
 		//scene_QuadClt.getTileProcessor().getTileSize();
 		TpTask[] tp_tasks_ref = GpuQuad.setInterTasks( // "true" reference, with stereo actual reference will be offset
 				scene.getNumSensors(),
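A hypothetical, self-contained sketch of the debug-image fill used by the new showPxPyD block above: a sparse per-tile array of {pX, pY, disparity} is transposed into three NaN-initialized planes so missing tiles stay NaN in the displayed image. The class name and sample data are assumptions for illustration only.

    import java.util.Arrays;

    public class PxPyDPlanes {
        public static void main(String[] args) {
            double[][] pXpYD = { {12.5, 3.0, 0.8}, null, {20.0, 3.0, 1.2} }; // sample tiles; null = no data
            double[][] planes = new double[3][pXpYD.length];
            for (double[] plane : planes) Arrays.fill(plane, Double.NaN);    // NaN background
            for (int t = 0; t < pXpYD.length; t++) if (pXpYD[t] != null) {
                for (int i = 0; i < planes.length; i++) planes[i][t] = pXpYD[t][i];
            }
            System.out.println(Arrays.toString(planes[2]));                  // [0.8, NaN, 1.2]
        }
    }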
src/main/java/com/elphel/imagej/tileprocessor/QuadCLTCPU.java

@@ -540,7 +540,7 @@ public class QuadCLTCPU {
 		}
 		Properties inter_properties = new Properties();
 		String prefix = is_aux?PREFIX_AUX:PREFIX;
-		setProperties(prefix, inter_properties);
+		setProperties(prefix, inter_properties); // null pointer
 		OutputStream os;
 		try {
 			os = new FileOutputStream(path);

@@ -1634,7 +1634,7 @@ public class QuadCLTCPU {
 			ErsCorrection ers = (ErsCorrection) gc;
 			ers.setPropertiesPose(prefix, properties);
 			ers.setPropertiesERS(prefix, properties);
-			ers.setPropertiesScenes(prefix, properties);
+			ers.setPropertiesScenes(prefix, properties); // null pointer
 			ers.setPropertiesLineTime(prefix, properties);
 		}
 		properties.setProperty(prefix+"num_orient", this.num_orient+"");
src/main/java/com/elphel/imagej/tileprocessor/TwoQuadCLT.java

@@ -8563,10 +8563,10 @@ if (debugLevel > -100) return true; // temporarily !
 	 * @param debugLevel
 	 * @throws Exception
 	 */
-	public void buildSeriesTQ(
+	public static void buildSeriesTQ(
 			QuadCLT quadCLT_main, // tiles should be set
-			int     ref_index, // -1 - last
-			int     ref_step,
+			int     ref_index_unused, // -1 - last
+			int     ref_step_unused,  // not used here
 			CLTParameters clt_parameters,
 			EyesisCorrectionParameters.DebayerParameters debayerParameters,
 			ColorProcParameters colorProcParameters,

@@ -8583,9 +8583,12 @@ if (debugLevel > -100) return true; // temporarily !
 		double  stereo_intereye =    clt_parameters.imp.stereo_intereye;
 		double  stereo_phone_width = clt_parameters.imp.stereo_phone_width; // 0 - no padding
 		boolean stereo_pad = (stereo_intereye > 0) && (stereo_phone_width > 0);
 		int     video_crf =          clt_parameters.imp.video_crf;
 		String  video_codec =        clt_parameters.imp.video_codec.toLowerCase();
+		int     video_crf_combo =    clt_parameters.imp.video_crf_combo;
+		String  video_codec_combo =  clt_parameters.imp.video_codec_combo.toLowerCase();
+		int     min_num_scenes =     clt_parameters.imp.min_num_scenes; // abandon series if there are less than this number of scenes in it
+		if (min_num_scenes < 1) {
+			min_num_scenes = 1;
+		}
 		long start_time_all = System.nanoTime();
 		OpticalFlow opticalFlow = new OpticalFlow(

@@ -8603,8 +8606,33 @@ if (debugLevel > -100) return true; // temporarily !
 				num_seq = pathFirstLast.length;
 			}
 		}
-		String [][] video_lists =   new String [num_seq][];
-		int [][]    stereo_widths = new int    [num_seq][];
+		class VideoSet {
+			String [] video_paths;
+			int    [] stereo_widths;
+			int earliest_scene, reference_scene;
+			VideoSet (String [] paths, int [] stereo_widths, int earliest_scene, int reference_scene) {
+				this.video_paths =     paths;
+				this.stereo_widths =   stereo_widths;
+				this.earliest_scene =  earliest_scene;
+				this.reference_scene = reference_scene;
+			}
+			String [] getVideoPaths()   {return video_paths;}
+			int    [] getStereoWidths() {return stereo_widths;}
+		}
+		ArrayList<VideoSet> video_sets_list = new ArrayList<VideoSet>();
+//		String [][] video_lists =   new String [num_seq][];
+//		int []      earliest_scene_pointer = new int[1];
+//		int [][]    stereo_widths = new int    [num_seq][];
 		for (int nseq = 0; nseq < num_seq; nseq++) {
 			long start_time_seq = System.nanoTime();
 			System.out.println("\nSTARTED PROCESSING SCENE SEQUENCE "+nseq+" (last is "+(num_seq-1)+")");

@@ -8615,13 +8643,16 @@ if (debugLevel > -100) return true; // temporarily !
 						pathFirstLast[nseq].first, // int scene_first, // first scene to process
 						pathFirstLast[nseq].last); // int scene_last); // last scene to process (negative - add length
 			}
 			String [][] video_list =  new String [1][];
 			int [][]    widths_list = new int    [1][];
+			int ref_index = -1; // -1 - last
+			int [] start_ref_pointers = new int [2];
+			while ((ref_index < 0) || ((ref_index + 1) >= min_num_scenes)) {
 			String model_directory = opticalFlow.buildSeries(
 					(pathFirstLast != null), //boolean              batch_mode,
 					quadCLT_main,            // QuadCLT              quadCLT_main, // tiles should be set
 					ref_index,               // int                  ref_index, // -1 - last
 					ref_step,                // int                  ref_step,
 					clt_parameters,          // CLTParameters        clt_parameters,
 					debayerParameters,       // EyesisCorrectionParameters.DebayerParameters debayerParameters,
 					colorProcParameters,     // ColorProcParameters  colorProcParameters,

@@ -8632,17 +8663,26 @@ if (debugLevel > -100) return true; // temporarily !
 					reset_from_extrinsics,   // boolean              reset_from_extrinsics,
 					video_list,              // String [][]          video_list, // null or list of generated avi or webm paths
 					widths_list,             //
+					start_ref_pointers,      // int []               start_ref_pointers,
 					threadsMax,              // final int            threadsMax, // maximal number of threads to launch
 					updateStatus,            // final boolean        updateStatus,
 					debugLevel+2);           // final int            debugLevel)
-			video_lists[nseq] =   video_list[0];
-			stereo_widths[nseq] = widths_list[0];
-			System.out.println("PROCESSING SCENE SEQUENCE "+nseq+" (last is "+(num_seq-1)+") is FINISHED in "+
+			if (model_directory == null) {
+				System.out.println("Failed to build sequence for series "+ref_index);
+				break; // and go to the to next scene sequence from the list
+			}
+			video_sets_list.add(new VideoSet(
+					video_list[0],           // String [] paths,
+					widths_list[0],          // int []    stereo_widths,
+					start_ref_pointers[0],   // int       earliest_scene,
+					start_ref_pointers[1])); // int       reference_scene);
+			String series_action = (start_ref_pointers[0] < (min_num_scenes - 1))? "is FINISHED ":
+				("will continue down from scene "+(start_ref_pointers[0]));
+			System.out.println("PROCESSING SCENE SEQUENCE "+nseq+" (last is "+(num_seq-1)+") "+series_action+" in "+
 					IJ.d2s(0.000000001*(System.nanoTime()-start_time_seq),3)+
 					" sec ("+IJ.d2s(0.000000001*(System.nanoTime()-start_time_all),3)+" sec from the overall start");
 			String linkedModelsDirectory = quadCLT_main.correctionsParameters.selectLinkedModelsDirectory(true, true); // will open dialog if does not exist
-			if ((linkedModelsDirectory != null) && (linkedModelsDirectory.length() > 0)) {
+			if ((linkedModelsDirectory != null) && (linkedModelsDirectory.length() > 0) && (model_directory != null)) {
 				Path pathAbsolute = Paths.get(model_directory);
 				Path pathBase =     Paths.get(linkedModelsDirectory);
 				Path pathRelative = pathBase.relativize(pathAbsolute);

@@ -8654,22 +8694,40 @@ if (debugLevel > -100) return true; // temporarily !
 				}
 				Files.createSymbolicLink(link.toPath(), pathRelative);
 			}
+			if (start_ref_pointers[0] < (min_num_scenes - 1)) {
+				break;
+			}
+			ref_index = start_ref_pointers[0]; // continue from the same attached to the previous reference
+			}
 		}
 		// combine videos if generated
-		if ((video_lists.length > 1) && (video_lists[0] != null) && (video_lists[0].length > 1)) { // do not combine if single sequence or no videos
+		if ((video_sets_list.size() > 1) && (video_sets_list.get(0).getVideoPaths() != null) && (video_sets_list.get(0).getVideoPaths().length > 0)) {
+			// need to sort first video_sets_list!
+			Collections.sort(video_sets_list, new Comparator<VideoSet>() {
+				@Override
+				public int compare(VideoSet lhs, VideoSet rhs) { // -1 - less than, 1 - greater than, 0 - equal, not inverted for ascending disparity
+					return lhs.getVideoPaths()[0].compareTo(rhs.getVideoPaths()[0]);
+				}
+			});
+//		if ((video_lists.length > 1) && (video_lists[0] != null) && (video_lists[0].length > 1)) { // do not combine if single sequence or no videos
 			concat_videos: {
-				System.out.println("Generating "+(video_lists[0].length)+" combined video files.");
+				System.out.println("Generating "+(video_sets_list.get(0).getVideoPaths().length)+" combined video files.");
 				String videoDirectory = quadCLT_main.correctionsParameters.selectVideoDirectory(true, true);
 				if (videoDirectory == null) {
 					break concat_videos;
 				}
 				File video_dir = new File(videoDirectory);
 				video_dir.mkdirs(); // Should already exist actually
-				for (int nvideo = 0; nvideo < video_lists[0].length; nvideo++) {
+				for (int nvideo = 0; nvideo < video_sets_list.get(0).getVideoPaths().length; nvideo++) {
 					// get name with <ts_sec_first>-<ts_sec_last>
 //					String spath0 = video_lists[0][nvideo];
-					String name0 = Paths.get(video_lists[0][nvideo]).getFileName().toString();
-					String name1 = Paths.get(video_lists[video_lists.length-1][nvideo]).getFileName().toString();
+					String name0 = Paths.get(video_sets_list.get(0).getVideoPaths()[nvideo]).getFileName().toString();
+					String name1 = Paths.get(video_sets_list.get(video_sets_list.size()-1).getVideoPaths()[nvideo]).getFileName().toString();
 					String ts_sec0 = name0.substring(0, name0.indexOf("_")); // seconds of the first timestamp
 					String ts_sec1 = name1.substring(0, name1.indexOf("_")); // seconds of the last timestamp
 					String suffix0 = name0.substring(name0.indexOf("-"));    // Skip timestamp

@@ -8684,16 +8742,16 @@ if (debugLevel > -100) return true; // temporarily !
 					PrintWriter writer = new PrintWriter(list_to_concat, "UTF-8");
 					int this_stereo_width = 0;
 					int num_segments = 0;
-					for (int i = 0; i < video_lists.length; i++) {
-						if ((video_lists[i] != null) && (video_lists[i].length > nvideo)) {
-							if ((new File(video_lists[i][nvideo])).exists()) {
-								writer.println("file '"+video_lists[i][nvideo]+"'");
+					for (int i = 0; i < video_sets_list.size(); i++) {
+						if (video_sets_list.size() > nvideo) {
+							if ((new File(video_sets_list.get(i).getVideoPaths()[nvideo])).exists()) {
+								writer.println("file '"+video_sets_list.get(i).getVideoPaths()[nvideo]+"'");
 								if (stereo_pad) {
-									this_stereo_width = stereo_widths[i][nvideo];
+									this_stereo_width = video_sets_list.get(i).getStereoWidths()[nvideo];
 								}
 								num_segments++;
 							} else {
-								System.out.println("Missing video segment: "+video_lists[i][nvideo]);
+								System.out.println("Missing video segment: "+video_sets_list.get(i).getVideoPaths()[nvideo]);
 							}
 						} else {
 							System.out.println("Specific video segment "+i+":"+nvideo+" is missing, skipping");

@@ -8720,10 +8778,10 @@ if (debugLevel > -100) return true; // temporarily !
 						int padded_width = 16*((int) Math.round((this_stereo_width + stereo_gap) * stereo_phone_width / stereo_intereye / 32));
 						shellCommand = String.format("ffmpeg -y -f concat -safe 0 -i %s -r 60 -vf pad=width=%d:height=0:x=-1:y=-1:color=black,setpts=%f*PTS -b:v 0 -crf %d -c %s %s",
-								list_to_concat.toString(), padded_width, pts_scale, video_crf, video_codec, video_out.toString());
+								list_to_concat.toString(), padded_width, pts_scale, video_crf_combo, video_codec_combo, video_out.toString());
 					} else {
 						shellCommand = String.format("ffmpeg -y -f concat -safe 0 -i %s -r 60 -vf setpts=%f*PTS -b:v 0 -crf %d -c %s %s",
-								list_to_concat.toString(), pts_scale, video_crf, video_codec, video_out.toString());
+								list_to_concat.toString(), pts_scale, video_crf_combo, video_codec_combo, video_out.toString());
 					}
 					Process p = null;
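A minimal sketch of the segment-concatenation command that now uses the new video_crf_combo / video_codec_combo settings (the second String.format call changed above). The format string is taken from the diff; the class name, paths and sample values below are placeholder assumptions, not part of the commit.

    public class ConcatCommand {
        public static void main(String[] args) {
            String list_to_concat = "/tmp/list.txt";      // ffmpeg concat list file (placeholder path)
            String video_out      = "/tmp/combined.webm"; // output path (placeholder)
            double pts_scale      = 1.0;
            int    video_crf_combo   = 40;                // lower - better, larger file size
            String video_codec_combo = "vp8";
            String shellCommand = String.format(
                "ffmpeg -y -f concat -safe 0 -i %s -r 60 -vf setpts=%f*PTS -b:v 0 -crf %d -c %s %s",
                list_to_concat, pts_scale, video_crf_combo, video_codec_combo, video_out);
            System.out.println(shellCommand);             // command the plugin would hand to the shell
        }
    }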