Elphel / imagej-elphel / Commits

Commit 7b84dba6
authored Jul 09, 2022 by Andrey Filippov
setup directories in a list file
parent ee4732b5

Showing 2 changed files with 247 additions and 79 deletions:
  EyesisCorrectionParameters.java (src/main/java/com/elphel/imagej/cameras/EyesisCorrectionParameters.java): +115 -48
  OpticalFlow.java (src/main/java/com/elphel/imagej/tileprocessor/OpticalFlow.java): +132 -31
src/main/java/com/elphel/imagej/cameras/EyesisCorrectionParameters.java
@@ -34,6 +34,7 @@ import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Properties;
@@ -53,6 +54,8 @@ import ij.gui.GenericDialog;
 public class EyesisCorrectionParameters {
     public static class CorrectionParameters{
+        public static final String [] KEY_DIRS = {"rootDirectory", "sourceDirectory", "linkedModels", "videoDirectory", "x3dDirectory", "resultsDirectory"};
+        public static final String    AUX_PREFIX = "AUX-";
         public boolean swapSubchannels01 = true; // false; // (false: 0-1-2, true - 1-0-2)
         public boolean split = true;
@@ -855,40 +858,40 @@ public class EyesisCorrectionParameters {
         gd.addCheckbox    ("Save current settings with results", this.saveSettings);
         gd.addTab         ("Directories","Direcories paths");
-        gd.addStringField ("Source files directory",            this.sourceDirectory, 60);
+        gd.addStringField ("Source files directory",            this.sourceDirectory, 80);
         gd.addCheckbox    ("Select source directory", false);
         gd.addCheckbox    ("Use individual subdirectory for image set", this.use_set_dirs);
-        gd.addStringField ("Source sequences list file",        this.sourceSequencesList, 60);
+        gd.addStringField ("Source sequences list file",        this.sourceSequencesList, 80);
         gd.addCheckbox    ("Select source sequences file", false);
         gd.addCheckbox    ("Use source list to iterate multiple sequences", this.useSourceList);
-        gd.addStringField ("Sensor calibration directory",      this.sensorDirectory, 60);
+        gd.addStringField ("Sensor calibration directory",      this.sensorDirectory, 80);
         gd.addCheckbox    ("Select sensor calibration directory", false);
-        gd.addStringField ("Aberration kernels (sharp) directory",  this.sharpKernelDirectory, 60);
+        gd.addStringField ("Aberration kernels (sharp) directory",  this.sharpKernelDirectory, 80);
         gd.addCheckbox    ("Select aberration kernels (sharp) directory", false);
-        gd.addStringField ("Aberration kernels (smooth) directory", this.smoothKernelDirectory, 60);
+        gd.addStringField ("Aberration kernels (smooth) directory", this.smoothKernelDirectory, 80);
         gd.addCheckbox    ("Select aberration kernels (smooth) directory", false);
-        gd.addStringField ("Aberration kernels for DCT directory",  this.dctKernelDirectory, 60);
+        gd.addStringField ("Aberration kernels for DCT directory",  this.dctKernelDirectory, 80);
         gd.addCheckbox    ("Select aberration kernels for DCT directory", false);
-        gd.addStringField ("Aberration kernels for CLT directory",  this.cltKernelDirectory, 60);
+        gd.addStringField ("Aberration kernels for CLT directory",  this.cltKernelDirectory, 80);
         gd.addCheckbox    ("Select aberration kernels for CLT directory", false);
         gd.addStringField ("x3d model version",                     this.x3dModelVersion, 20);   // 10a
         gd.addStringField ("JP4 source image copy model subdirectory", this.jp4SubDir, 20);      // 10b
-        gd.addStringField ("Linked reference models",               this.linkedModels, 60,
+        gd.addStringField ("Linked reference models",               this.linkedModels, 80,
                 "Directory where links to reference models directories will be created.");
         gd.addCheckbox    ("Select linked reference models directory", false);
-        gd.addStringField ("Video directory",                       this.videoDirectory, 60,
+        gd.addStringField ("Video directory",                       this.videoDirectory, 80,
                 "Directory to store combined video files.");
         gd.addCheckbox    ("Select video directory", false);
-        gd.addStringField ("x3d output directory",                  this.x3dDirectory, 60);
+        gd.addStringField ("x3d output directory",                  this.x3dDirectory, 80);
         gd.addCheckbox    ("Select x3d output directory", false);
         gd.addCheckbox    ("Use individual subdirectory for each 3d model (timestamp as name)", this.use_x3d_subdirs);
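Note (illustration, not part of the commit): the repeated 60 -> 80 change in this tab and in the tabs below only widens the text fields that hold directory paths. A minimal sketch, assuming the dialog class used here follows the plain ImageJ GenericDialog convention where the last numeric argument of addStringField is the visible field width in columns (the Elphel tabbed dialog additionally accepts a tooltip string); the dialog title and path are made up:

import ij.gui.GenericDialog;

public class FieldWidthSketch {
    public static void main(String[] args) {
        GenericDialog gd = new GenericDialog("Directories"); // hypothetical stand-alone dialog
        // 80 columns instead of 60 leaves room for longer absolute paths:
        gd.addStringField("Source files directory", "/data/footage/source", 80); // was 60; example path
        gd.showDialog();
        if (!gd.wasCanceled()) {
            String sourceDirectory = gd.getNextString(); // read back in the same order as added
            System.out.println("sourceDirectory = " + sourceDirectory);
        }
    }
}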
@@ -897,18 +900,18 @@ public class EyesisCorrectionParameters {
                 "When using timestamp as a subdirectory, add this prefix");
         gd.addStringField ("x3d subdirectory suffix", this.x3dSubdirSuffix, 10, "When using timestamp as a subdirectory, add this suffix");
-        gd.addStringField ("ML output directory",     this.mlDirectory, 60,
+        gd.addStringField ("ML output directory",     this.mlDirectory, 80,
                 "Non-empty directory with no \"/\" separator makes it a subdirectory of the model version directory");
         gd.addCheckbox    ("Select ML output directory", false, "Erase text field or use \"/\" in it to enable absolute directory path selection");
-        gd.addStringField ("Equirectangular maps directory (may be empty)", this.equirectangularDirectory, 60);
+        gd.addStringField ("Equirectangular maps directory (may be empty)", this.equirectangularDirectory, 80);
         gd.addCheckbox    ("Select equirectangular maps directory", false);
-        gd.addStringField ("Results directory",       this.resultsDirectory, 60);
+        gd.addStringField ("Results directory",       this.resultsDirectory, 80);
         gd.addCheckbox    ("Select results directory", false);
         gd.addTab         ("Prefix/suffix","Prefixes and suffixes for various file types");
-        gd.addStringField ("Source files prefix",     this.sourcePrefix, 60);
-        gd.addStringField ("Source files suffix",     this.sourceSuffix, 60);
+        gd.addStringField ("Source files prefix",     this.sourcePrefix, 80);
+        gd.addStringField ("Source files suffix",     this.sourceSuffix, 80);
         gd.addNumericField("First subcamera (in the source filenames)",             this.firstSubCamera, 0);
         gd.addNumericField("First subcamera (in config (clt, sensor) directories)", this.firstSubCameraConfig, 0);
         gd.addNumericField("Number of subcameras in this camera",                   this.numSubCameras, 0);
@@ -1071,58 +1074,58 @@ public class EyesisCorrectionParameters {
         gd.addTab         ("File paths", "Select files and directories paths (common to main and optional auxiliary)");
         gd.addMessage     ("============ Common to the main and optional auxiliary camera============");
-        gd.addStringField ("GPU tile_processor_gpu project absolute path", this.tile_processor_gpu, 60,
+        gd.addStringField ("GPU tile_processor_gpu project absolute path", this.tile_processor_gpu, 80,
                 "Keep empty to use default GPU kernels");
         gd.addCheckbox    ("Select GPU directory", false);
         gd.addCheckbox    ("Save current settings with results", this.saveSettings);             // 1
-        gd.addStringField ("Source files directory",             this.sourceDirectory, 60);      // 2
+        gd.addStringField ("Source files directory",             this.sourceDirectory, 80);      // 2
         gd.addCheckbox    ("Select source directory", false);                                    // 3
         gd.addCheckbox    ("Use individual subdirectory for each image set (timestamp as name)", this.use_set_dirs); //10
-        gd.addStringField ("Source sequences list file",         this.sourceSequencesList, 60);  // 10x
+        gd.addStringField ("Source sequences list file",         this.sourceSequencesList, 80);  // 10x
         gd.addCheckbox    ("Select source sequences file", false);                               // 10y
         gd.addCheckbox    ("Use source list to iterate multiple sequences", this.useSourceList); // 10z
-        gd.addStringField ("x3d model version",                  this.x3dModelVersion, 60);      // 10a
-        gd.addStringField ("jp4 source copy subdirectory",       this.jp4SubDir, 60);            // 10b
+        gd.addStringField ("x3d model version",                  this.x3dModelVersion, 80);      // 10a
+        gd.addStringField ("jp4 source copy subdirectory",       this.jp4SubDir, 80);            // 10b
-        gd.addStringField ("Linked reference models",            this.linkedModels, 60,
+        gd.addStringField ("Linked reference models",            this.linkedModels, 80,
                 "Directory where links to reference models directories will be created.");
         gd.addCheckbox    ("Select linked reference models directory", false);
-        gd.addStringField ("Video directory",                    this.videoDirectory, 60,
+        gd.addStringField ("Video directory",                    this.videoDirectory, 80,
                 "Directory to store combined video files.");
         gd.addCheckbox    ("Select video directory", false);
-        gd.addStringField ("x3d output directory",               this.x3dDirectory, 60);         // 8
+        gd.addStringField ("x3d output directory",               this.x3dDirectory, 80);         // 8
         gd.addCheckbox    ("Select x3d output (top model) directory", false);                    // 9
         gd.addCheckbox    ("Use individual subdirectory for each 3d model (timestamp as name)", this.use_x3d_subdirs); //10
-//      gd.addStringField ("Source files prefix",                this.sourcePrefix, 60);         // 13
-//      gd.addStringField ("Source files suffix",                this.sourceSuffix, 60);         // 14
+//      gd.addStringField ("Source files prefix",                this.sourcePrefix, 80);         // 13
+//      gd.addStringField ("Source files suffix",                this.sourceSuffix, 80);         // 14
         gd.addStringField ("x3d subdirectory prefix",            this.x3dSubdirPrefix, 10,       // 14a
                 "When using timestamp as a subdirectory, add this prefix");
-        gd.addStringField ("ML output directory",                this.mlDirectory, 60,
+        gd.addStringField ("ML output directory",                this.mlDirectory, 80,
                 "Non-empty directory with no \"/\" separator makes it a subdirectory of the model version directory");
         gd.addCheckbox    ("Select ML output directory", false, "Erase text field or use \"/\" in it to enable absolute directory path selection");
         gd.addMessage     ("============ Main camera============");
-        gd.addStringField ("Sensor calibration directory",        this.sensorDirectory, 60);     // 4
+        gd.addStringField ("Sensor calibration directory",        this.sensorDirectory, 80);     // 4
         gd.addCheckbox    ("Select sensor calibration directory", false);                        // 5
-        gd.addStringField ("Aberration kernels for CLT directory", this.cltKernelDirectory, 60); // 6
+        gd.addStringField ("Aberration kernels for CLT directory", this.cltKernelDirectory, 80); // 6
         gd.addCheckbox    ("Select aberration kernels for CLT directory", false);                // 7
-        gd.addStringField ("Results directory",                   this.resultsDirectory, 60);    // 11
+        gd.addStringField ("Results directory",                   this.resultsDirectory, 80);    // 11
         gd.addCheckbox    ("Select results directory", false);                                   // 12
         gd.addNumericField("First subcamera (in the source filename)",             this.firstSubCamera, 0); // 15
         gd.addNumericField("First subcamera (in config (clt, sensor) directories)", this.firstSubCameraConfig, 0);
         gd.addNumericField("Number of subcameras in this camera ",                 this.numSubCameras, 0);  // 16
-        gd.addStringField ("Source files prefix",                 this.sourcePrefix, 60);        // 13
-        gd.addStringField ("Source files suffix",                 this.sourceSuffix, 60);        // 14
+        gd.addStringField ("Source files prefix",                 this.sourcePrefix, 80);        // 13
+        gd.addStringField ("Source files suffix",                 this.sourceSuffix, 80);        // 14
         gd.addStringField ("Sensor files prefix",                 this.sensorPrefix, 40);        // 17
         gd.addStringField ("Sensor files suffix",                 this.sensorSuffix, 40);        // 18
@@ -1132,17 +1135,17 @@ public class EyesisCorrectionParameters {
                 "When using timestamp as a subdirectory, add this suffix");
         gd.addMessage     ("============ Auxiliary camera============");
-        gd.addStringField ("Aux sensor calibration directory",        this.aux_camera.sensorDirectory, 60);     // 4b
+        gd.addStringField ("Aux sensor calibration directory",        this.aux_camera.sensorDirectory, 80);     // 4b
         gd.addCheckbox    ("Select aux sensor calibration directory", false);                                   // 5b
-        gd.addStringField ("Aberration kernels for aux CLT directory", this.aux_camera.cltKernelDirectory, 60); // 6b
+        gd.addStringField ("Aberration kernels for aux CLT directory", this.aux_camera.cltKernelDirectory, 80); // 6b
         gd.addCheckbox    ("Select aberration kernels for aux CLT directory", false);                           // 7b
-        gd.addStringField ("Aux results directory",                   this.aux_camera.resultsDirectory, 60);    // 11b
+        gd.addStringField ("Aux results directory",                   this.aux_camera.resultsDirectory, 80);    // 11b
         gd.addCheckbox    ("Select aux results directory", false);                                              // 12b
         gd.addNumericField("First aux subcamera (in the source filename)", this.aux_camera.firstSubCamera, 0);  // 15b
         gd.addNumericField("First aux subcamera (in config (clt, sensor) directories)", this.aux_camera.firstSubCameraConfig, 0);
         gd.addNumericField("Number of aux subcameras in this camera ",     this.aux_camera.numSubCameras, 0);   // 16b
-        gd.addStringField ("Aux Source files prefix",                 this.aux_camera.sourcePrefix, 60);        // 13
-        gd.addStringField ("Aux Source files suffix",                 this.aux_camera.sourceSuffix, 60);        // 14
+        gd.addStringField ("Aux Source files prefix",                 this.aux_camera.sourcePrefix, 80);        // 13
+        gd.addStringField ("Aux Source files suffix",                 this.aux_camera.sourceSuffix, 80);        // 14
         gd.addStringField ("Aux sensor files prefix",                 this.aux_camera.sensorPrefix, 40);        // 17b
         gd.addStringField ("Aux sensor files suffix",                 this.aux_camera.sensorSuffix, 40);        // 18b
         gd.addStringField ("Aux CLT kernel files prefix",             this.aux_camera.cltKernelPrefix, 40);     // 19b
@@ -1541,12 +1544,83 @@ public class EyesisCorrectionParameters {
             e.printStackTrace();
             return null;
         }
-        Path basedir = seq_path.getParent();
+        Path base_path = seq_path.getParent();
+        // first - scan all file and set sourceDirectory, x3dDirectory, linkedModels,videoDirectory,resultsDirectory
+        // String [] dir_paths = new String[KEY_DIRS.length];
+        HashMap<String,String> dir_map = new HashMap<String,String>();
+        for (String line: lines){
+            String [] tokens = line.split("#")[0].trim().split("[\\s,;=]+");
+            if ((tokens.length > 2) && (tokens[0].toUpperCase().equals("SET"))) {
+                parse_set: {
+                    for (String dir_name: KEY_DIRS) if (dir_name.equals(tokens[1])) {
+                        dir_map.put(dir_name, tokens[2]);
+                        System.out.println("Parsed SET: "+tokens[1]+" in line: "+line);
+                        break parse_set;
+                    }
+                    System.out.println("*********** Unknown SET: "+tokens[1]+" in line: "+line);
+                }
+            }
+        }
+        if (dir_map.containsKey("rootDirectory")) {
+            base_path = base_path.resolve(Paths.get(dir_map.get("rootDirectory")));
+            File base_dir = new File(base_path.toString());
+            if (!base_dir.exists()) {
+                base_dir.mkdirs();
+            }
+        }
+        // set sourceDirectory:
+        if (dir_map.containsKey("sourceDirectory")) {
+            this.sourceDirectory = (base_path.resolve(Paths.get(dir_map.get("sourceDirectory")))).toString();
+        }
+        if ((this.sourceDirectory == null) || (this.sourceDirectory.trim().length() == 0) || !(new File(this.sourceDirectory).exists())) {
+            System.out.println("Problem with source scenes directory ("+this.sourceDirectory+", using current: "+seq_path.getParent());
+            this.sourceDirectory = seq_path.getParent().toString();
+        }
+        Path source_path = Paths.get(this.sourceDirectory);
+        File source_dir = new File(source_path.toString());
+        if (!source_dir.exists()) {
+            source_dir.mkdirs();
+        }
+        // Set other directories (possibly relative to base_path)
+        for (int i = 2; i < KEY_DIRS.length; i++) { // skip "rootDirectory" and "sourceDirectory"
+            if (dir_map.containsKey(KEY_DIRS[i])) {
+                Path dir_path = base_path.resolve(Paths.get(dir_map.get(KEY_DIRS[i])));
+                File dir_file = new File(dir_path.toString());
+                if (!dir_file.exists()) {
+                    dir_file.mkdirs();
+                }
+                switch (i) {
+                case 2: this.linkedModels =     dir_path.toString(); System.out.println("this.linkedModels="+this.linkedModels);         break;
+                case 3: this.videoDirectory =   dir_path.toString(); System.out.println("this.videoDirectory="+this.videoDirectory);     break;
+                case 4: this.x3dDirectory =     dir_path.toString(); System.out.println("this.x3dDirectory="+this.x3dDirectory);         break;
+                case 5: this.resultsDirectory = dir_path.toString(); System.out.println("this.resultsDirectory="+this.resultsDirectory); break;
+                }
+            }
+        }
+        // process source sequence directories
         ArrayList<PathFirstLast> path_list = new ArrayList<PathFirstLast>();
         for (String line: lines){
-            String [] tokens = line.split("#")[0].trim().split("[\\s,;]+");
-            if ((tokens.length > 0) && (tokens[0].length() > 0)) {
-                Path dir_path = basedir.resolve(Paths.get(tokens[0]));
+            String [] tokens = line.split("#")[0].trim().split("[\\s,;=]+");
+            if ((tokens.length > 0) && (tokens[0].length() > 0) && (!tokens[0].toUpperCase().equals("SET"))) {
+                Path dir_path = source_path.resolve(Paths.get(tokens[0]));
                 path_list.add(new PathFirstLast(
                         dir_path.toString(),
                         ((tokens.length > 1)? Integer.parseInt(tokens[1]): 0),
@@ -1555,14 +1629,7 @@ public class EyesisCorrectionParameters {
            }
        }
        return path_list.toArray(new PathFirstLast[0]);
    }
    //Path newPath = path.resolve(childPath);
-   /*
-   public static File [] getSeqScenes(String seq_path) {
-       File seq_file = new File(seq_path);
-       File [] scene_files =seq_file.listFiles();
-       return scene_files; // may contain non-directories, will be filtered by filterScenes
-   }
-   */
    public boolean selectSourceSets(int debugLevel) {
//     PathFirstLast [] pfl=getSourceSets(this.sourceSequencesList);
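Illustration (not part of the commit): judging from the parser above, a source sequences list file consists of optional "SET <key> <value>" lines, where <key> must exactly match one of the KEY_DIRS names (the SET keyword itself is case-insensitive) and relative paths are resolved against the list file location or the declared rootDirectory (missing directories are created), followed by one sequence subdirectory per line, resolved against sourceDirectory, with an optional first scene index (and, judging by the PathFirstLast name, a last one); "#" starts a comment. All names below are hypothetical:

# hypothetical source sequences list file
SET rootDirectory    ..               # base for the other SET paths
SET sourceDirectory  source_sets      # where the per-sequence subdirectories live
SET linkedModels     linked_models
SET videoDirectory   video
SET x3dDirectory     models
SET resultsDirectory results
seq_001 0 120                         # sequence subdirectory, first (and last) scene index
seq_002 10                            # only the first scene index
seq_003                               # whole sequence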
src/main/java/com/elphel/imagej/tileprocessor/OpticalFlow.java
@@ -2248,6 +2248,7 @@ public class OpticalFlow {
        String title = reference_QuadCLT.getImageName()+"-"+scene_QuadCLT.image_name+suffix;
        double [][] dsrbg = transformCameraVew( // shifts previous image correctly (right)
                title,          // final String title,
+               null,           // final double [][] dsrbg_camera_in,
                camera_xyz0,    // double [] camera_xyz, // camera center in world coordinates
                camera_atr0,    // double [] camera_atr, // camera orientation relative to world frame
                scene_QuadCLT,  // QuadCLT camera_QuadClt,
@@ -2257,6 +2258,7 @@ public class OpticalFlow {
        if (blur_reference) {
            dsrbg_ref = transformCameraVew(   // shifts previous image correctly (right)
                    title+"-reference",       // final String title,
+                   null,                     // final double [][] dsrbg_camera_in,
                    ZERO3,                    // camera_xyz0, // double [] camera_xyz, // camera center in world coordinates
                    ZERO3,                    // camera_atr0, // double [] camera_atr, // camera orientation relative to world frame
                    reference_QuadCLT,        // scene_QuadCLT, // QuadCLT camera_QuadClt,
@@ -2266,7 +2268,6 @@ public class OpticalFlow {
            dsrbg_ref = reference_QuadCLT.getDSRBG();
        }
        double [][][] pair = {dsrbg_ref, dsrbg};
        TileProcessor tp = reference_QuadCLT.getTileProcessor();
        int tilesX = tp.getTilesX();
        int tilesY = tp.getTilesY();
@@ -2321,6 +2322,7 @@ public class OpticalFlow {
            ers_scene.setupERS();
            dsrbg[i] = transformCameraVew(  // shifts previous image correctly (right) null pointer
                    title,                  // final String title,
+                   null,                   // final double [][] dsrbg_camera_in,
                    scene_xyzatr[indx][0],  // double [] camera_xyz, // camera center in world coordinates
                    scene_xyzatr[indx][1],  // double [] camera_atr, // camera orientation relative to world frame
                    scenes[indx],           // QuadCLT camera_QuadClt,
@@ -2497,6 +2499,7 @@ public class OpticalFlow {
                scene_ers_xyz_dt,  // double [] ers_xyz_dt,
                scene_ers_atr_dt); // double [] ers_atr_dt)(ers_scene_original_xyz_dt);
        //setupERS() will be inside transformToScenePxPyD()
+       // OK to use the same reference_QuadClt for both reference_QuadClt and scene_QuadClt
        double [][] scene_pXpYD = transformToScenePxPyD( // will be null for disparity == NaN
                null,          // final Rectangle [] extra_woi, // show larger than sensor WOI (or null)
                disparity_ref, // final double [] disparity_ref, // invalid tiles - NaN in disparity (maybe it should not be masked by margins?)
@@ -2505,7 +2508,7 @@ public class OpticalFlow {
                scene_QuadClt,      // final QuadCLT scene_QuadClt,
                reference_QuadClt); // final QuadCLT reference_QuadClt)
-       TpTask [] tp_tasks = GpuQuad.setInterTasks(
+       TpTask [] tp_tasks = GpuQuad.setInterTasks( // just to calculate valid_tiles
                scene_QuadClt.getNumSensors(),
                scene_QuadClt.getGeometryCorrection().getSensorWH()[0],
                !scene_QuadClt.hasGPU(), // final boolean calcPortsCoordinatesAndDerivatives, // GPU can calculate them centreXY
@@ -2516,7 +2519,7 @@ public class OpticalFlow {
                margin,      // final int margin, // do not use tiles if their centers are closer to the edges
                valid_tiles, // final boolean [] valid_tiles,
                threadsMax); // final int threadsMax) // maximal number of threads to launch
-       //FIXME:  not clear here tp_tasks was supposed to go?
+       //FIXME:  not clear here tp_tasks was supposed to go? no
       /*
       scene_QuadClt.getGPU().setInterTasks(
           scene_pXpYD, // final double [][] pXpYD, // per-tile array of pX,pY,disparity triplets (or nulls)
@@ -2646,7 +2649,7 @@ public class OpticalFlow {
            };
        }
        ImageDtt.startAndJoin(threads);
-       double [][] toref_pXpYD = transformFromScenePxPyD(
+       double [][] toref_pXpYD = transformFromScenePxPyD( // does not look at identity scene_xyz, scene_atr
                scene_pXpYD, // final double [][] pXpYD_scene, // tiles correspond to reference, pX,pY,D - for scene
                scene_xyz,   // final double []   scene_xyz,   // camera center in world coordinates
                scene_atr,   // final double []   scene_atr,   // camera orientation relative to world frame
@@ -2780,9 +2783,9 @@ public class OpticalFlow {
                }
            }
        }
-       final boolean ref_is_identity = false; /// (scene_xyz[0]==0.0) && (scene_xyz[1]==0.0) && (scene_xyz[2]==0.0) &&
-       ///     (scene_atr[0]==0.0) && (scene_atr[1]==0.0) && (scene_atr[2]==0.0);
+       final boolean ref_is_identity = (scene_xyz[0]==0.0) && (scene_xyz[1]==0.0) && (scene_xyz[2]==0.0) &&
+               (scene_atr[0]==0.0) && (scene_atr[1]==0.0) && (scene_atr[2]==0.0);
        final double [] disparity_ref = dref;
//      final int tilesX_ref = ref_w;
//      final int tilesY_ref = ref_h;
@@ -3243,6 +3246,7 @@ public class OpticalFlow {
     * Transform scene view to visually match with a reference scene. It is not accurate as it uses resampling and
     * related low pass filtering.
     * @param title image title to print
+    * @param dsrbg_camera_in - null (old compatibility) or [variable_length][tiles] array of disparity, strength, ... for the camera tiles
     * @param scene_xyz Scene X (right),Y (up), Z (negative away form camera) in the reference camera coordinates
     *                  or null to use scene instance coordinates.
     * @param scene_atr Scene azimuth, tilt and roll (or null to use scene instance).
@@ -3253,12 +3257,14 @@ public class OpticalFlow {
     */
    public double [][] transformCameraVew(
            final String    title,
+           final double [][] dsrbg_camera_in,
            final double [] scene_xyz, // camera center in world coordinates
            final double [] scene_atr, // camera orientation relative to world frame
            final QuadCLT   scene_QuadClt,
            final QuadCLT   reference_QuadClt,
            final int       iscale)
    {
+       boolean debug = (title != null) && (title.length() > 0);
        final double line_error = 0.5;
        TileProcessor tp = reference_QuadClt.getTileProcessor();
        final int tilesX = tp.getTilesX();
@@ -3271,9 +3277,9 @@ public class OpticalFlow {
        final int stilesX = iscale*tilesX;
        final int stilesY = iscale*tilesY;
        final int stiles = stilesX*stilesY;
-       final double sigma = 0.5 * iscale;
+       final double sigma = 0.5 * iscale; // was 0.5
        final double scale =  1.0 * iscale/transform_size;
-       final double [][] dsrbg_camera = scene_QuadClt.getDSRBG();
+       final double [][] dsrbg_camera = (dsrbg_camera_in == null) ? scene_QuadClt.getDSRBG() : dsrbg_camera_in;
        if (dsrbg_camera == null) {
            return null;
        }
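A condensed sketch (not from the commit) of the two ways transformCameraVew() can now be called; all names (title, camera_xyz0, camera_atr0, scene_QuadCLT, reference_QuadCLT, iscale, xyz_offset, ZERO3, quadCLTs, ref_index, disparity, strength) are taken from the surrounding hunks and assumed to be in scope:

// 1. Old-style call: null selects scene_QuadCLT.getDSRBG() internally (unchanged behavior).
double [][] dsrbg = transformCameraVew(
        title,             // a non-empty title also enables the new debug image output
        null,              // dsrbg_camera_in == null -> use the scene's own DSRBG tiles
        camera_xyz0,       // camera center in world coordinates
        camera_atr0,       // camera orientation relative to world frame
        scene_QuadCLT,
        reference_QuadCLT,
        iscale);

// 2. New-style call: re-project an explicit {disparity, strength} pair to an offset
//    vantage point, using the same scene as both scene and reference (as in the later hunks).
double [][] ds_virtual = transformCameraVew(
        null,                                  // null/empty title -> no debug output
        new double [][] {disparity, strength}, // explicit per-tile data instead of DSRBG
        xyz_offset,                            // virtual camera offset
        ZERO3,                                 // no additional rotation
        quadCLTs[ref_index],
        quadCLTs[ref_index],
        8);                                    // iscale value used in the diff for this case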
@@ -3288,14 +3294,15 @@ public class OpticalFlow {
        final ErsCorrection ersSceneCorrection = scene_QuadClt.getErsCorrection();
        ersReferenceCorrection.setupERS(); // just in case - setUP using instance paRAMETERS
        ersSceneCorrection.setupERS();
-       System.out.println("\ntransformCameraVew(): >> "+title +" <<");
-       System.out.println("Reference scene ("+reference_QuadClt.getImageName()+"):");
+       if (debug) {
+           System.out.println("\ntransformCameraVew(): transformCameraVew(): >> "+title +" <<");
+           System.out.println("transformCameraVew(): Reference scene ("+reference_QuadClt.getImageName()+"):");
            ersReferenceCorrection.printVectors(null, null);
-       System.out.println("\n Target scene ("+scene_QuadClt.getImageName()+"):");
+           System.out.println("transformCameraVew(): Target scene ("+scene_QuadClt.getImageName()+"):");
            ersSceneCorrection.printVectors (scene_xyz, scene_atr);
+       }
        final Thread []      threads = ImageDtt.newThreadArray(threadsMax);
        final AtomicInteger  ai = new AtomicInteger(0);
//      final double [] zbuffer = new double [tiles];
        DoubleAccumulator [] azbuffer = new DoubleAccumulator[tiles];
        for (int ithread = 0; ithread < threads.length; ithread++) {
            threads[ithread] = new Thread() {
@@ -3360,6 +3367,15 @@ public class OpticalFlow {
            }
            ImageDtt.startAndJoin(threads);
            ai.set(0);
+           if (debug) {
+               (new ShowDoubleFloatArrays()).showArrays(
+                       ds,
+                       stilesX,
+                       stilesY,
+                       true,
+                       "ds-0",
+                       new String [] {"D","S"});
+           }
            for (int ithread = 0; ithread < threads.length; ithread++) {
                threads[ithread] = new Thread() {
@@ -3379,6 +3395,15 @@ public class OpticalFlow {
        }
        ImageDtt.startAndJoin(threads);
        ai.set(0);
+       if (debug) {
+           (new ShowDoubleFloatArrays()).showArrays(
+                   ds,
+                   stilesX,
+                   stilesY,
+                   true,
+                   "ds-1",
+                   new String [] {"D","S"});
+       }
        final double [][] dsrbg_out = new double [dsrbg_camera.length][tiles];
        final int    [][] num_non_nan = new int [dsrbg_out.length] [tiles];
@@ -3411,6 +3436,15 @@ public class OpticalFlow {
            };
        }
        ImageDtt.startAndJoin(threads);
+       if (debug) {
+           (new ShowDoubleFloatArrays()).showArrays(
+                   dsrbg_out,
+                   tilesX,
+                   tilesY,
+                   true,
+                   "dsrbg_out-0");
+       }
        for (int i = 0; i < dsrbg_out.length; i++) {
            for (int j = 0; j < tiles; j++) {
@@ -3421,7 +3455,15 @@ public class OpticalFlow {
                }
            }
        }
+       if (debug) {
+           (new ShowDoubleFloatArrays()).showArrays(
+                   dsrbg_out,
+                   tilesX,
+                   tilesY,
+                   true,
+                   "dsrbg_out-1");
+       }
        /*
        if (num_passes > 0) {
            for (int i = 0; i < dsrbg_out.length; i++) {
                dsrbg_out[i] = tp.fillNaNs(
@@ -3433,6 +3475,15 @@ public class OpticalFlow {
                    threadsMax); // final int threadsMax) // maximal number of threads to launch
            }
        }
+       if (debug) {
+           (new ShowDoubleFloatArrays()).showArrays(
+                   dsrbg_out,
+                   tilesX,
+                   tilesY,
+                   true,
+                   "dsrbg_out-2");
+       }
        */
        return dsrbg_out;
    }
@@ -4444,7 +4495,9 @@ public class OpticalFlow {
        int tilesX = quadCLTs[ref_index].getTileProcessor().getTilesX();
        int tilesY = quadCLTs[ref_index].getTileProcessor().getTilesY();
        double [] disparity_fg = null;
+       double [] strength_fg = null;
        double [] disparity_bg = null;
+       double [] strength_bg = null;
        double [] disparity_raw = null;
        if (generate_mapped) {
            disparity_raw = new double [tilesX * tilesY];
@@ -4468,6 +4521,7 @@ public class OpticalFlow {
                    debugLevel);
            disparity_fg = ds[0]; // combo_dsn_final[COMBO_DSN_INDX_DISP_FG];
+           strength_fg = ds[1];
            // BG mode
            double [] bg_lma = combo_dsn_final[COMBO_DSN_INDX_DISP_BG_ALL].clone();
            double [] bg_str = combo_dsn_final[COMBO_DSN_INDX_STRENGTH].clone();
@@ -4495,7 +4549,7 @@ public class OpticalFlow {
                    quadCLTs[ref_index], // QuadCLT scene,
                    debugLevel);
            disparity_bg = ds_bg[0]; // combo_dsn_final[COMBO_DSN_INDX_DISP_FG];
+           strength_bg = ds_bg[1];
        // for now using disparity for just standard size (90x64), later may use full size and at
        // minimum fill peripheral areas with Laplassian?
        double [][] dxyzatr_dt = new double [quadCLTs.length][];
@@ -4568,6 +4622,12 @@ public class OpticalFlow {
            // col_mode: 0 - mono, 1 - color
            for (int col_mode = 0; col_mode < 2; col_mode++) if (gen_seq_mono_color[col_mode]){ // skip if not needed
                double [] selected_disparity = (mode3d > 1)? disparity_bg: ((mode3d > 0)? disparity_fg: disparity_raw);
+               double [] selected_strength =  (mode3d > 1)? strength_bg:  ((mode3d > 0)? strength_fg: null);
+               if (selected_strength != null) { // for FG/BG only, fixing for transformCameraVew()
+                   for (int i = 0; i < selected_disparity.length; i++) {
+                       if (!Double.isNaN(selected_disparity[i]) && (selected_strength[i] == 0)) selected_strength[i] = 0.01; // transformCameraVew ignores strength= 0
+                   }
+               }
                final boolean toRGB = col_mode > 0;
                String scenes_suffix = quadCLTs[quadCLTs.length-1].getImageName() + "-SEQ-" + IntersceneMatchParameters.MODES3D[mode3d+1] + "-" + (toRGB? "COLOR": "MONO");
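The same zero-strength fix reappears further down for ds_fg/ds_bg. A hypothetical helper (not in the commit) summarizing the pattern: tiles with a valid disparity but zero strength would otherwise be dropped by transformCameraVew(), so they get a small floor value.

// Hypothetical helper, not part of this commit - shown only to capture the inline fix.
// ds[0] is per-tile disparity, ds[1] is per-tile strength, as in the {D,S} pairs above.
static void floorStrength(double [][] ds, double min_str) {
    for (int i = 0; i < ds[0].length; i++) {
        if (!Double.isNaN(ds[0][i]) && (ds[1][i] == 0)) {
            ds[1][i] = min_str; // the commit uses 0.01; transformCameraVew ignores strength == 0
        }
    }
}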
@@ -4593,7 +4653,17 @@ public class OpticalFlow {
                if (views[ibase][2] != 0) {
                    scenes_suffix += "-Z"+views[ibase][2];
                }
+               double [][] ds_vantage = new double [][] {selected_disparity, selected_strength};
+               if ((views[ibase][0] != 0) || (views[ibase][1] != 0) || (views[ibase][2] != 0)) {
+                   ds_vantage = transformCameraVew(
+                           null,                // (debug_ds_fg_virt?"transformCameraVew":null), // final String title,
+                           ds_vantage,          // final double [][] dsrbg_camera_in,
+                           xyz_offset,          // _inverse[0], // final double [] scene_xyz, // camera center in world coordinates
+                           ZERO3,               // _inverse[1], // final double [] scene_atr, // camera orientation relative to world frame
+                           quadCLTs[ref_index], // final QuadCLT scene_QuadClt,
+                           quadCLTs[ref_index], // final QuadCLT reference_QuadClt,
+                           8);                  // iscale); // final int iscale);
+               }
                if (generate_mapped) {
                    imp_scenes_pair[nstereo]= renderSceneSequence(
                            clt_parameters, // CLTParameters clt_parameters,
@@ -4603,7 +4673,7 @@ public class OpticalFlow {
                            xyz_offset,         // double [] stereo_offset, // offset reference camera {x,y,z}
                            sensor_mask,        // int sensor_mask,
                            scenes_suffix,      // String suffix,
-                           selected_disparity, // double [] ref_disparity,
+                           ds_vantage[0],      // selected_disparity, // double [] ref_disparity,
                            quadCLTs,           // QuadCLT [] quadCLTs,
                            debugLevel);        // int debugLevel);
                    if (save_mapped_mono_color[col_mode]) {
@@ -4870,14 +4940,14 @@ public class OpticalFlow {
                combo_dsn_final[COMBO_DSN_INDX_LMA],
                combo_dsn_final[COMBO_DSN_INDX_STRENGTH]
        };
-       double [][] ds = conditionInitialDS(
+       double [][] ds_fg = conditionInitialDS(
                true,                // boolean use_conf, // use configuration parameters, false - use following
                clt_parameters,      // CLTParameters clt_parameters,
                dls,                 // double [][] dls
                quadCLTs[ref_index], // QuadCLT scene,
                debugLevel);
-       double [] fg_disparity = ds[0];    // combo_dsn_final[COMBO_DSN_INDX_DISP_FG];
+       double [] fg_disparity = ds_fg[0]; // combo_dsn_final[COMBO_DSN_INDX_DISP_FG];
        double [] bg_lma = combo_dsn_final[COMBO_DSN_INDX_DISP_BG_ALL].clone();
        double [] bg_str = combo_dsn_final[COMBO_DSN_INDX_STRENGTH].clone();
@@ -4951,8 +5021,13 @@ public class OpticalFlow {
            }
        }
-       boolean offset_fg_image = true; // config later, generate FG image for all stereo views
+       boolean offset_fg_image = false; // true; // config later, generate FG image for all stereo views
        double [][] img_views = offset_fg_image ? stereo_views : (new double [][] {{0,0,0}});
+       double min_str = 0.01;
+       for (int i = 0; i < ds_fg[0].length; i++) {
+           if (!Double.isNaN(ds_fg[0][i]) && (ds_fg[1][i] == 0)) ds_fg[1][i] = min_str; // transformCameraVew ignores strength= 0
+           if (!Double.isNaN(ds_bg[0][i]) && (ds_bg[1][i] == 0)) ds_bg[1][i] = min_str;
+       }
        for (int ibase = 0; ibase < img_views.length; ibase++) if (!offset_fg_image || generate_stereo_var[ibase]) {
            double stereo_baseline_meters = 0.001 * img_views[ibase][0];
            double view_height_meters =     0.001 * img_views[ibase][1];
@@ -4961,7 +5036,7 @@ public class OpticalFlow {
                    -stereo_baseline_meters, // x offset
                    -view_height_meters,     // Y offset
                    -view_back_meters};      // Z offset
            double [] atr_offset = ZERO3;
            String scenes_suffix = "";
            if (img_views[ibase][0] != 0) {
                scenes_suffix += "-B"+img_views[ibase][0];
@@ -4972,13 +5047,38 @@ public class OpticalFlow {
            if (img_views[ibase][2] != 0) {
                scenes_suffix += "-Z"+img_views[ibase][2];
            }
+           // calculate virtual view fg_ds_virt from the reference ds_fg;
+           boolean debug_ds_fg_virt = false; // false;
+           double [][] ds_fg_virt = ds_fg;
+           if ((img_views[ibase][0] != 0) || (img_views[ibase][1] != 0) || (img_views[ibase][2] != 0)) {
+               ds_fg_virt = transformCameraVew(
+                       (debug_ds_fg_virt? "transformCameraVew": null), // final String title,
+                       ds_fg,               // final double [][] dsrbg_camera_in,
+                       xyz_offset,          // _inverse[0], // final double [] scene_xyz, // camera center in world coordinates
+                       atr_offset,          // _inverse[1], // final double [] scene_atr, // camera orientation relative to world frame
+                       quadCLTs[ref_index], // final QuadCLT scene_QuadClt,
+                       quadCLTs[ref_index], // final QuadCLT reference_QuadClt,
+                       8);                  // iscale); // final int iscale);
+               if (debug_ds_fg_virt){
+                   int dbgX = quadCLTs[ref_index].getTileProcessor().getTilesX();
+                   int dbgY = quadCLTs[ref_index].getTileProcessor().getTilesY();
+                   double [][] dbg_img = new double [][] {ds_fg[0], ds_fg_virt[0], ds_fg[1], ds_fg_virt[1]};
+                   (new ShowDoubleFloatArrays()).showArrays(
+                           dbg_img,
+                           dbgX,
+                           dbgY,
+                           true,
+                           "virtual-view-ds",
+                           new String [] {"d-ref", "d-virt", "s-ref", "s-virt"}); // dsrbg_titles);
+               }
+           }
            ImagePlus imp_fg = QuadCLT.renderGPUFromDSI(
                    -1,                  // final int sensor_mask,
                    false,               // final boolean merge_channels,
                    null,                // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
                    clt_parameters,      // CLTParameters clt_parameters,
-                   fg_disparity,        // double [] disparity_ref,
+                   ds_fg_virt[0],       // fg_disparity, // double [] disparity_ref,
                    xyz_offset,          // ZERO3, // final double [] scene_xyz, // camera center in world coordinates
                    ZERO3,               // final double [] scene_atr, // camera orientation relative to world frame
                    quadCLTs[ref_index], // final QuadCLT scene,
@@ -4995,7 +5095,7 @@ public class OpticalFlow {
                    false,               // final boolean merge_channels,
                    null,                // final Rectangle full_woi_in, // show larger than sensor WOI (or null)
                    clt_parameters,      // CLTParameters clt_parameters,
-                   fg_disparity,        // double [] disparity_ref,
+                   ds_fg_virt[0],       // fg_disparity, // double [] disparity_ref,
                    xyz_offset,          // ZERO3, // final double [] scene_xyz, // camera center in world coordinates
                    ZERO3,               // final double [] scene_atr, // camera orientation relative to world frame
                    quadCLTs[ref_index], // final QuadCLT scene,
@@ -12896,6 +12996,7 @@ public double[][] correlateIntersceneDebug( // only uses GPU and quad
            String title = this_image_name+"-"+scene_QuadCLT.image_name+"-dt"+dt;
            double [][] dsrbg = transformCameraVew( // shifts previous image correctly (right)
                    title,         // final String title,
+                   null,          // final double [][] dsrbg_camera_in,
                    camera_xyz0,   // double [] camera_xyz, // camera center in world coordinates
                    camera_atr0,   // double [] camera_atr, // camera orientation relative to world frame
                    scene_QuadCLT, // QuadCLT camera_QuadClt,