Elphel / imagej-elphel

Commit 369f2898, authored Mar 24, 2017 by Andrey Filippov

disparity expansion

Parent: fcbcecae
Showing 3 changed files, with 1179 additions and 366 deletions:

  src/main/java/EyesisCorrectionParameters.java   +149  -4
  src/main/java/QuadCLT.java                      +162  -5
  src/main/java/TileProcessor.java                +868  -357
src/main/java/EyesisCorrectionParameters.java

@@ -2055,7 +2055,8 @@ public class EyesisCorrectionParameters {
 		public double  or_absVert         = 0.19; // Minimal vertical absolute scaled offset ortho strength needed for replacement
 		public boolean poles_fix          = true; // Continue vertical structures to the ground
-		public int     poles_len          = 50;   // Number of tiles to extend over the poles bottoms
+		public int     poles_len          = 25;   // Number of tiles to extend over the poles bottoms
+		public double  poles_ratio        = 1.0;  // Maximal ratio of invisible to visible pole length
 		public double  poles_min_strength = 0.1;  // Set new pole segment strength to max of horizontal correlation and this value
 		public boolean poles_force_disp   = true; // Set disparity to that of the bottom of existing segment (false - use hor. disparity)
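The new poles_ratio parameter caps how far a vertical structure may be continued below its detected bottom relative to its visible height. A minimal sketch of the limit the comments describe (this is an illustration of the documented meaning only; the actual pole-fixing code is elsewhere in TileProcessor and is not part of this hunk, and allowedPoleExtension is a hypothetical helper name):

    // Extend a pole downwards by at most polesLen tiles, and never by more than
    // polesRatio times its visible length (the "invisible to visible" ratio).
    static int allowedPoleExtension(int visibleLenTiles, int polesLen, double polesRatio) {
        int byRatio = (int) Math.floor(polesRatio * visibleLenTiles); // limit from the ratio
        return Math.min(polesLen, byRatio);                           // limit from the tile count
    }

With the defaults in this commit (poles_len = 25, poles_ratio = 1.0), a pole that is visible over 10 tiles would be continued by at most 10 more tiles toward the ground.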
@@ -2098,6 +2099,10 @@ public class EyesisCorrectionParameters {
 		public int     tiNumCycles     = 5;     // Number of cycles break-smooth (after the first smooth)
 		// FG/BG separation
+		public boolean stUseRefine     = false; // Apply super-tiles during refine passes
+		public boolean stUsePass2      = true;  // Apply super-tiles during pass2
+		public boolean stUseRender     = true;  // Apply super-tiles during render
 		public boolean stShow          = false; // Calculate and show supertiles histograms
 		public int     stSize          = 8;     // Super tile size (square, in tiles)
 		public double  stStepDisparity = 0.1;   // Disaprity histogram step
@@ -2112,7 +2117,33 @@ public class EyesisCorrectionParameters {
 		public double  outlayerStrength = 0.3;  // Outlayer tiles weaker than this may be replaced from neighbors
 		public double  outlayerDiff     = 0.4;  // Replace weak outlayer tiles that do not have neighbors within this disparity difference
+		public double  outlayerDiffPos  = 1.0;  // Replace weak outlayer tiles that have higher disparity than weighted average
+		public double  outlayerDiffNeg  = 0.4;  // Replace weak outlayer tiles that have lower disparity than weighted average
+		// TODO: Make refine skip if already good?
+		public boolean combine_refine       = true;  // combine with all previous after refine pass
+		public double  combine_min_strength = 0.12;  // Disregard weaker tiles when combining scans
+		public double  unique_tolerance     = 0.1;   // Do not re-measure correlation if target disparity differs from some previous by this
+		// Multi-pass growing disparity
+		public int     grow_sweep      = 8;    // Try these number of tiles around known ones
+		public double  grow_disp_max   = 50.0; // Maximal disparity to try
+		public double  grow_disp_trust = 4.0;  // Trust measured disparity within +/- this value
+		public double  grow_disp_step  = 6.0;  // Increase disparity (from maximal tried) if nothing found in that tile // TODO: handle enclosed dips?
+		public double  grow_min_diff   = 0.5;  // Grow more only if at least one channel has higher variance from others for the tile
+		// other debug images
+		public boolean show_ortho_combine     = false; // Show 'ortho_combine'
+		public boolean show_refine_supertiles = false; // show 'refine_disparity_supertiles'
+		public boolean show_bgnd_nonbgnd      = false; // show 'bgnd_nonbgnd'
+		public boolean show_filter_scan       = false; // show 'FilterScan'
+		public boolean show_combined          = false; // show 'combo_scan' (combined multiple scans)
+		public boolean show_unique            = false; // show 'unique_scan' (removed already measured tiles with the same disparity)
+		public boolean show_shells            = false; // show 'shells'
+		public boolean show_neighbors         = false; // show 'neighbors'
+		public boolean show_flaps_dirs        = false; // show 'flaps-dirs'
+		public boolean show_first_clusters    = false; // show 'first_N_clusters'
 		public CLTParameters(){}
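The grow_* parameters describe the new multi-pass sweep that probes progressively larger disparities around already-measured tiles. A rough sketch of the disparity schedule these values suggest (an assumption for illustration; the real stepping is done inside setupExtendDisparity and refinePassSetup, not by a helper like this):

    import java.util.ArrayList;
    import java.util.List;

    // Hypothetical schedule of target disparities for an empty tile: start just above
    // the maximal disparity already tried and step by growDispStep up to growDispMax.
    static List<Double> growDisparitySchedule(double maxTried, double growDispStep, double growDispMax) {
        List<Double> targets = new ArrayList<>();
        for (double d = maxTried + growDispStep; d <= growDispMax; d += growDispStep) {
            targets.add(d);
        }
        return targets;
    }

With grow_disp_step = 6.0 and grow_disp_max = 50.0, a tile whose surroundings were last tried at disparity 8 would get targets 14, 20, ..., 50.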
@@ -2250,6 +2281,7 @@ public class EyesisCorrectionParameters {
 			properties.setProperty(prefix+"poles_fix",          this.poles_fix+"");
 			properties.setProperty(prefix+"poles_len",          this.poles_len+"");
+			properties.setProperty(prefix+"poles_ratio",        this.poles_ratio+"");
 			properties.setProperty(prefix+"poles_min_strength", this.poles_min_strength+"");
 			properties.setProperty(prefix+"poles_force_disp",   this.poles_force_disp+"");
@@ -2290,6 +2322,10 @@ public class EyesisCorrectionParameters {
 			properties.setProperty(prefix+"tiPrecision",     this.tiPrecision+"");
 			properties.setProperty(prefix+"tiNumCycles",     this.tiNumCycles+"");
+			properties.setProperty(prefix+"stUseRefine",     this.stUseRefine+"");
+			properties.setProperty(prefix+"stUsePass2",      this.stUsePass2+"");
+			properties.setProperty(prefix+"stUseRender",     this.stUseRender+"");
 			properties.setProperty(prefix+"stShow",          this.stShow+"");
 			properties.setProperty(prefix+"stSize",          this.stSize+"");
 			properties.setProperty(prefix+"stStepDisparity", this.stStepDisparity+"");
@@ -2303,6 +2339,29 @@ public class EyesisCorrectionParameters {
 			properties.setProperty(prefix+"stUseDisp",              this.stUseDisp+"");
 			properties.setProperty(prefix+"outlayerStrength",       this.outlayerStrength+"");
 			properties.setProperty(prefix+"outlayerDiff",           this.outlayerDiff+"");
+			properties.setProperty(prefix+"outlayerDiffPos",        this.outlayerDiffPos+"");
+			properties.setProperty(prefix+"outlayerDiffNeg",        this.outlayerDiffNeg+"");
+			properties.setProperty(prefix+"combine_refine",         this.combine_refine+"");
+			properties.setProperty(prefix+"combine_min_strength",   this.combine_min_strength+"");
+			properties.setProperty(prefix+"unique_tolerance",       this.unique_tolerance+"");
+			properties.setProperty(prefix+"grow_sweep",             this.grow_sweep+"");
+			properties.setProperty(prefix+"grow_disp_max",          this.grow_disp_max+"");
+			properties.setProperty(prefix+"grow_disp_trust",        this.grow_disp_trust+"");
+			properties.setProperty(prefix+"grow_disp_step",         this.grow_disp_step+"");
+			properties.setProperty(prefix+"grow_min_diff",          this.grow_min_diff+"");
+			properties.setProperty(prefix+"show_ortho_combine",     this.show_ortho_combine+"");
+			properties.setProperty(prefix+"show_refine_supertiles", this.show_refine_supertiles+"");
+			properties.setProperty(prefix+"show_bgnd_nonbgnd",      this.show_bgnd_nonbgnd+"");
+			properties.setProperty(prefix+"show_filter_scan",       this.show_filter_scan+"");
+			properties.setProperty(prefix+"show_combined",          this.show_combined+"");
+			properties.setProperty(prefix+"show_unique",            this.show_unique+"");
+			properties.setProperty(prefix+"show_shells",            this.show_shells+"");
+			properties.setProperty(prefix+"show_neighbors",         this.show_neighbors+"");
+			properties.setProperty(prefix+"show_flaps_dirs",        this.show_flaps_dirs+"");
+			properties.setProperty(prefix+"show_first_clusters",    this.show_first_clusters+"");
 		}
 		public void getProperties(String prefix, Properties properties){
 			if (properties.getProperty(prefix+"transform_size")!=null) this.transform_size=Integer.parseInt(properties.getProperty(prefix+"transform_size"));
@@ -2434,6 +2493,7 @@ public class EyesisCorrectionParameters {
 			if (properties.getProperty(prefix+"poles_fix")!=null)          this.poles_fix=Boolean.parseBoolean(properties.getProperty(prefix+"poles_fix"));
 			if (properties.getProperty(prefix+"poles_len")!=null)          this.poles_len=Integer.parseInt(properties.getProperty(prefix+"poles_len"));
+			if (properties.getProperty(prefix+"poles_ratio")!=null)        this.poles_ratio=Double.parseDouble(properties.getProperty(prefix+"poles_ratio"));
 			if (properties.getProperty(prefix+"poles_min_strength")!=null) this.poles_min_strength=Double.parseDouble(properties.getProperty(prefix+"poles_min_strength"));
 			if (properties.getProperty(prefix+"poles_force_disp")!=null)   this.poles_force_disp=Boolean.parseBoolean(properties.getProperty(prefix+"poles_force_disp"));
@@ -2472,6 +2532,10 @@ public class EyesisCorrectionParameters {
 			if (properties.getProperty(prefix+"tiPrecision")!=null)     this.tiPrecision=Integer.parseInt(properties.getProperty(prefix+"tiPrecision"));
 			if (properties.getProperty(prefix+"tiNumCycles")!=null)     this.tiNumCycles=Integer.parseInt(properties.getProperty(prefix+"tiNumCycles"));
+			if (properties.getProperty(prefix+"stUseRefine")!=null)     this.stUseRefine=Boolean.parseBoolean(properties.getProperty(prefix+"stUseRefine"));
+			if (properties.getProperty(prefix+"stUsePass2")!=null)      this.stUsePass2=Boolean.parseBoolean(properties.getProperty(prefix+"stUsePass2"));
+			if (properties.getProperty(prefix+"stUseRender")!=null)     this.stUseRender=Boolean.parseBoolean(properties.getProperty(prefix+"stUseRender"));
 			if (properties.getProperty(prefix+"stShow")!=null)          this.stShow=Boolean.parseBoolean(properties.getProperty(prefix+"stShow"));
 			if (properties.getProperty(prefix+"stSize")!=null)          this.stSize=Integer.parseInt(properties.getProperty(prefix+"stSize"));
 			if (properties.getProperty(prefix+"stStepDisparity")!=null) this.stStepDisparity=Double.parseDouble(properties.getProperty(prefix+"stStepDisparity"));
@@ -2485,6 +2549,30 @@ public class EyesisCorrectionParameters {
 			if (properties.getProperty(prefix+"stUseDisp")!=null)              this.stUseDisp=Double.parseDouble(properties.getProperty(prefix+"stUseDisp"));
 			if (properties.getProperty(prefix+"outlayerStrength")!=null)       this.outlayerStrength=Double.parseDouble(properties.getProperty(prefix+"outlayerStrength"));
 			if (properties.getProperty(prefix+"outlayerDiff")!=null)           this.outlayerDiff=Double.parseDouble(properties.getProperty(prefix+"outlayerDiff"));
+			if (properties.getProperty(prefix+"outlayerDiffPos")!=null)        this.outlayerDiffPos=Double.parseDouble(properties.getProperty(prefix+"outlayerDiffPos"));
+			if (properties.getProperty(prefix+"outlayerDiffNeg")!=null)        this.outlayerDiffNeg=Double.parseDouble(properties.getProperty(prefix+"outlayerDiffNeg"));
+			if (properties.getProperty(prefix+"combine_refine")!=null)         this.combine_refine=Boolean.parseBoolean(properties.getProperty(prefix+"combine_refine"));
+			if (properties.getProperty(prefix+"combine_min_strength")!=null)   this.combine_min_strength=Double.parseDouble(properties.getProperty(prefix+"combine_min_strength"));
+			if (properties.getProperty(prefix+"unique_tolerance")!=null)       this.unique_tolerance=Double.parseDouble(properties.getProperty(prefix+"unique_tolerance"));
+			if (properties.getProperty(prefix+"grow_sweep")!=null)             this.grow_sweep=Integer.parseInt(properties.getProperty(prefix+"grow_sweep"));
+			if (properties.getProperty(prefix+"grow_disp_max")!=null)          this.grow_disp_max=Double.parseDouble(properties.getProperty(prefix+"grow_disp_max"));
+			if (properties.getProperty(prefix+"grow_disp_trust")!=null)        this.grow_disp_trust=Double.parseDouble(properties.getProperty(prefix+"grow_disp_trust"));
+			if (properties.getProperty(prefix+"grow_disp_step")!=null)         this.grow_disp_step=Double.parseDouble(properties.getProperty(prefix+"grow_disp_step"));
+			if (properties.getProperty(prefix+"grow_min_diff")!=null)          this.grow_min_diff=Double.parseDouble(properties.getProperty(prefix+"grow_min_diff"));
+			if (properties.getProperty(prefix+"show_ortho_combine")!=null)     this.show_ortho_combine=Boolean.parseBoolean(properties.getProperty(prefix+"show_ortho_combine"));
+			if (properties.getProperty(prefix+"show_refine_supertiles")!=null) this.show_refine_supertiles=Boolean.parseBoolean(properties.getProperty(prefix+"show_refine_supertiles"));
+			if (properties.getProperty(prefix+"show_bgnd_nonbgnd")!=null)      this.show_bgnd_nonbgnd=Boolean.parseBoolean(properties.getProperty(prefix+"show_bgnd_nonbgnd"));
+			if (properties.getProperty(prefix+"show_filter_scan")!=null)       this.show_filter_scan=Boolean.parseBoolean(properties.getProperty(prefix+"show_filter_scan"));
+			if (properties.getProperty(prefix+"show_combined")!=null)          this.show_combined=Boolean.parseBoolean(properties.getProperty(prefix+"show_combined"));
+			if (properties.getProperty(prefix+"show_unique")!=null)            this.show_unique=Boolean.parseBoolean(properties.getProperty(prefix+"show_unique"));
+			if (properties.getProperty(prefix+"show_shells")!=null)            this.show_shells=Boolean.parseBoolean(properties.getProperty(prefix+"show_shells"));
+			if (properties.getProperty(prefix+"show_neighbors")!=null)         this.show_neighbors=Boolean.parseBoolean(properties.getProperty(prefix+"show_neighbors"));
+			if (properties.getProperty(prefix+"show_flaps_dirs")!=null)        this.show_flaps_dirs=Boolean.parseBoolean(properties.getProperty(prefix+"show_flaps_dirs"));
+			if (properties.getProperty(prefix+"show_first_clusters")!=null)    this.show_first_clusters=Boolean.parseBoolean(properties.getProperty(prefix+"show_first_clusters"));
 		}
 		public boolean showDialog() {
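Both halves follow the class's usual persistence pattern: each field is written under prefix+name as a string, and on reading a field is only overwritten when its key exists, so configuration files saved before this commit still load and the new parameters simply keep their defaults. A minimal standalone sketch of that round trip using two of the fields added here (the class name RoundTripSketch is illustrative, not part of the repository):

    import java.util.Properties;

    class RoundTripSketch {
        double unique_tolerance = 0.1; // default survives when the key is absent
        int    grow_sweep       = 8;

        void setProperties(String prefix, Properties properties) {
            properties.setProperty(prefix + "unique_tolerance", this.unique_tolerance + "");
            properties.setProperty(prefix + "grow_sweep",       this.grow_sweep + "");
        }

        void getProperties(String prefix, Properties properties) {
            // only override the default when the key was actually saved
            if (properties.getProperty(prefix + "unique_tolerance") != null)
                this.unique_tolerance = Double.parseDouble(properties.getProperty(prefix + "unique_tolerance"));
            if (properties.getProperty(prefix + "grow_sweep") != null)
                this.grow_sweep = Integer.parseInt(properties.getProperty(prefix + "grow_sweep"));
        }
    }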
@@ -2634,6 +2722,7 @@ public class EyesisCorrectionParameters {
 			gd.addMessage     ("--- Fix vertical structures, such as street poles ---");
 			gd.addCheckbox    ("Continue vertical structures to the ground",                                      this.poles_fix);
 			gd.addNumericField("Number of tiles to extend over the poles bottoms",                                this.poles_len,          0);
+			gd.addNumericField("Maximal ratio of invisible to visible pole length",                               this.poles_ratio,        3);
 			gd.addNumericField("Set new pole segment strength to max of horizontal correlation and this value",   this.poles_min_strength, 3);
 			gd.addCheckbox    ("Set disparity to that of the bottom of existing segment (false - use hor. disparity)", this.poles_force_disp);
@@ -2675,7 +2764,11 @@ public class EyesisCorrectionParameters {
 			gd.addNumericField("Iteration maximal error (1/power of 10)",                                         this.tiPrecision,     0);
 			gd.addNumericField("Number of cycles break-smooth (after the first smooth)",                          this.tiNumCycles,     0);
 			gd.addMessage     ("--- Fg/Bg separation ---");
-			gd.addCheckbox    ("Calculate and show supertiles histograms",                                        this.stShow);
+			gd.addCheckbox    ("Apply super-tiles during refine passes",                                          this.stUseRefine);
+			gd.addCheckbox    ("Apply super-tiles during pass2 ",                                                 this.stUsePass2);
+			gd.addCheckbox    ("Apply super-tiles during render",                                                 this.stUseRender);
+			gd.addCheckbox    ("Show supertiles histograms",                                                      this.stShow);
 			gd.addNumericField("Super tile size (square, in tiles)",                                              this.stSize,          0);
 			gd.addNumericField("Disaprity histogram step",                                                        this.stStepDisparity, 6);
 			gd.addNumericField("Minimal disparity (center of a bin)",                                             this.stMinDisparity,  6);
@@ -2687,8 +2780,31 @@ public class EyesisCorrectionParameters {
 			gd.addNumericField("Minimal fraction of the disparity histogram to use as background",                this.stMinBgFract,         6);
 			gd.addNumericField("Use background disparity from supertiles if tile strength is less",               this.stUseDisp,            6);
 			gd.addNumericField("Outlayer tiles weaker than this may be replaced from neighbors",                  this.outlayerStrength,     6);
-			gd.addNumericField("Replace weak outlayer tiles that do not have neighbors within this disparity difference ", this.outlayerDiff, 6);
+			gd.addNumericField("Replace weak outlayer tiles that do not have neighbors within this disparity difference", this.outlayerDiff, 6);
+			gd.addNumericField("Replace weak outlayer tiles that have higher disparity than weighted average",    this.outlayerDiffPos,      6);
+			gd.addNumericField("Replace weak outlayer tiles that have lower disparity than weighted average",     this.outlayerDiffNeg,      6);
+			gd.addCheckbox    ("Combine with all previous after refine pass",                                     this.combine_refine);
+			gd.addNumericField("Disregard weaker tiles when combining scans",                                     this.combine_min_strength, 6);
+			gd.addNumericField("Do not re-measure correlation if target disparity differs from some previous by this", this.unique_tolerance, 6);
+			gd.addMessage     ("--- Growing disparity range to scan ---");
+			gd.addNumericField("Try these number of tiles around known ones",                                     this.grow_sweep,           0);
+			gd.addNumericField("Maximal disparity to try",                                                         this.grow_disp_max,        6);
+			gd.addNumericField("Trust measured disparity within +/- this value",                                  this.grow_disp_trust,      6);
+			gd.addNumericField("Increase disparity (from maximal tried) if nothing found in that tile",           this.grow_disp_step,       6);
+			gd.addNumericField("Grow more only if at least one channel has higher variance from others for the tile", this.grow_min_diff,    6);
+			gd.addMessage     ("--- Other debug images ---");
+			gd.addCheckbox    ("Show 'ortho_combine'",                                                            this.show_ortho_combine);
+			gd.addCheckbox    ("Show 'refine_disparity_supertiles'",                                              this.show_refine_supertiles);
+			gd.addCheckbox    ("Show 'bgnd_nonbgnd'",                                                             this.show_bgnd_nonbgnd);
+			gd.addCheckbox    ("Show 'FilterScan'",                                                               this.show_filter_scan);
+			gd.addCheckbox    ("Show 'combo_scan' (combined multiple scans)",                                     this.show_combined);
+			gd.addCheckbox    ("Show 'unique_scan' (removed already measured tiles with the same disparity)",     this.show_unique);
+			gd.addCheckbox    ("Show 'shells'",                                                                   this.show_shells);
+			gd.addCheckbox    ("show 'neighbors'",                                                                this.show_neighbors);
+			gd.addCheckbox    ("Show 'flaps-dirs'",                                                               this.show_flaps_dirs);
+			gd.addCheckbox    ("Show 'first_N_clusters'",                                                         this.show_first_clusters);
 			WindowTools.addScrollBars(gd);
 			gd.showDialog();
@@ -2827,6 +2943,7 @@ public class EyesisCorrectionParameters {
 			this.poles_fix=          gd.getNextBoolean();
 			this.poles_len=    (int) gd.getNextNumber();
+			this.poles_ratio=        gd.getNextNumber();
 			this.poles_min_strength= gd.getNextNumber();
 			this.poles_force_disp=   gd.getNextBoolean();
@@ -2865,6 +2982,10 @@ public class EyesisCorrectionParameters {
 			this.tiPrecision=     (int) gd.getNextNumber();
 			this.tiNumCycles=     (int) gd.getNextNumber();
+			this.stUseRefine=           gd.getNextBoolean();
+			this.stUsePass2=            gd.getNextBoolean();
+			this.stUseRender=           gd.getNextBoolean();
 			this.stShow=                gd.getNextBoolean();
 			this.stSize=          (int) gd.getNextNumber();
 			this.stStepDisparity=       gd.getNextNumber();
@@ -2878,6 +2999,30 @@ public class EyesisCorrectionParameters {
 			this.stUseDisp=               gd.getNextNumber();
 			this.outlayerStrength=        gd.getNextNumber();
 			this.outlayerDiff=            gd.getNextNumber();
+			this.outlayerDiffPos=         gd.getNextNumber();
+			this.outlayerDiffNeg=         gd.getNextNumber();
+			this.combine_refine=          gd.getNextBoolean();
+			this.combine_min_strength=    gd.getNextNumber();
+			this.unique_tolerance=        gd.getNextNumber();
+			this.grow_sweep=        (int) gd.getNextNumber();
+			this.grow_disp_max=           gd.getNextNumber();
+			this.grow_disp_trust=         gd.getNextNumber();
+			this.grow_disp_step=          gd.getNextNumber();
+			this.grow_min_diff=           gd.getNextNumber();
+			this.show_ortho_combine=      gd.getNextBoolean();
+			this.show_refine_supertiles=  gd.getNextBoolean();
+			this.show_bgnd_nonbgnd=       gd.getNextBoolean(); // first on second pass
+			this.show_filter_scan=        gd.getNextBoolean(); // first on refine
+			this.show_combined=           gd.getNextBoolean();
+			this.show_unique=             gd.getNextBoolean();
+			this.show_shells=             gd.getNextBoolean();
+			this.show_neighbors=          gd.getNextBoolean();
+			this.show_flaps_dirs=         gd.getNextBoolean();
+			this.show_first_clusters=     gd.getNextBoolean();
 			return true;
 		}
 	}
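showDialog() uses ImageJ's GenericDialog, where values are read back positionally: every addNumericField/addCheckbox call must be matched by a getNextNumber/getNextBoolean in exactly the same order, which is why the new poles_ratio, stUse* and grow_* entries appear at the same relative positions in both halves of the method above. A minimal sketch of that contract with two of the fields added in this commit (the dialog title and the helper name are illustrative):

    import ij.gui.GenericDialog;

    // The order of add* calls must match the order of getNext* calls exactly,
    // otherwise every later field is read from the wrong slot.
    static double[] askGrowParameters(double growDispMax, double growDispTrust) {
        GenericDialog gd = new GenericDialog("Growing disparity range"); // illustrative title
        gd.addNumericField("Maximal disparity to try",                       growDispMax,   6);
        gd.addNumericField("Trust measured disparity within +/- this value", growDispTrust, 6);
        gd.showDialog();
        if (gd.wasCanceled()) return null;
        growDispMax   = gd.getNextNumber(); // slot 1
        growDispTrust = gd.getNextNumber(); // slot 2
        return new double[] {growDispMax, growDispTrust};
    }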
src/main/java/QuadCLT.java

@@ -4752,15 +4752,16 @@ public class QuadCLT {
 		  // refine first measurement
 		  int bg_pass = tp.clt_3d_passes.size() - 1; // 0
 		  int refine_pass = tp.clt_3d_passes.size(); // 1
-		  for (int nnn = 0; nnn < 3; nnn ++){
+		  for (int nnn = 0; nnn < 4; nnn ++){
 			  refine_pass = tp.clt_3d_passes.size(); // 1
 			  tp.refinePassSetup( // prepare tile tasks for the refine pass (re-measure disparities)
 					  //  final double [][][]       image_data, // first index - number of image in a quad
 					  clt_parameters,
+					  clt_parameters.stUseRefine, // use supertiles
 					  bg_pass,
 					  // disparity range - differences from
 					  clt_parameters.bgnd_range,    // double            disparity_far,
-					  clt_parameters.other_range,   //double            disparity_near, //
+					  clt_parameters.grow_disp_max, // other_range, //double            disparity_near, //
 					  clt_parameters.bgnd_sure,     // double            this_sure,        // minimal strength to be considered definitely background
 					  clt_parameters.bgnd_maybe,    // double            this_maybe,       // maximal strength to ignore as non-background
 					  clt_parameters.sure_smth,     // sure_smth,        // if 2-nd worst image difference (noise-normalized) exceeds this - do not propagate bgnd
@@ -4769,6 +4770,107 @@ public class QuadCLT {
 					  threadsMax,  // maximal number of threads to launch
 					  updateStatus,
 					  debugLevel);
+			  int [] numLeftRemoved = tp.makeUnique (
+					  tp.clt_3d_passes,                  // final ArrayList <CLTPass3d> passes,
+					  0,                                 // final int                   firstPass,
+					  refine_pass - 1,                   // final int                   lastPassPlus1,
+					  tp.clt_3d_passes.get(refine_pass), // final CLTPass3d             new_scan,
+					  clt_parameters.unique_tolerance,   // final double                unique_tolerance,
+					  clt_parameters.show_unique);       // final boolean               show_unique)
+			  if (debugLevel > -1){
+				  System.out.println("cycle makeUnique("+refine_pass+") -> left: "+numLeftRemoved[0]+", removed:"+numLeftRemoved[1]);
+			  }
+			  CLTMeasure( // perform single pass according to prepared tiles operations and disparity
+					  image_data, // first index - number of image in a quad
+					  clt_parameters,
+					  refine_pass,
+					  threadsMax,  // maximal number of threads to launch
+					  updateStatus,
+					  debugLevel);
+			  if (debugLevel > -1){
+				  System.out.println("CLTMeasure("+refine_pass+")");
+			  }
+			  if (clt_parameters.combine_refine){
+				  // TileProcessor.CLTPass3d scan = tp.clt_3d_passes.get(scanIndex);
+				  TileProcessor.CLTPass3d combo_pass = tp.combinePasses(
+						  tp.clt_3d_passes,        // final ArrayList <CLTPass3d> passes,
+						  bg_pass,                 // final int                   firstPass,
+						  tp.clt_3d_passes.size(), // final int                   lastPassPlus1,
+						  true,                    // skip_combo, // do not process other combo scans
+						  true,                    // final boolean               use_last,   // use last scan data if nothing better
+						  false,                   // not calculated yet! true, // final boolean useCombo, // use combined disparity/strength (false - use measured full correlation
+						  false,                   // final boolean               usePoly,  // use polynomial method to find max), valid if useCombo == false
+						  clt_parameters.combine_min_strength, // final double    minStrength, // ignore too weak tiles
+						  clt_parameters.show_combined);
+				  tp.clt_3d_passes.add(combo_pass);
+				  // refine_pass = tp.clt_3d_passes.size(); //
+			  }
+		  }
+		  // process once more to try combining of processed
+		  refine_pass = tp.clt_3d_passes.size(); // 1
+		  tp.refinePassSetup( // prepare tile tasks for the refine pass (re-measure disparities)
+				  //  final double [][][]       image_data, // first index - number of image in a quad
+				  clt_parameters,
+				  clt_parameters.stUseRefine, // use supertiles
+				  bg_pass,
+				  // disparity range - differences from
+				  clt_parameters.bgnd_range,    // double            disparity_far,
+				  clt_parameters.grow_disp_max, // other_range, //double            disparity_near, //
+				  clt_parameters.bgnd_sure,     // double            this_sure,        // minimal strength to be considered definitely background
+				  clt_parameters.bgnd_maybe,    // double            this_maybe,       // maximal strength to ignore as non-background
+				  clt_parameters.sure_smth,     // sure_smth,        // if 2-nd worst image difference (noise-normalized) exceeds this - do not propagate bgnd
+				  ImageDtt.DISPARITY_INDEX_CM,  // index of disparity value in disparity_map == 2 (0,2 or 4)
+				  geometryCorrection,
+				  threadsMax,  // maximal number of threads to launch
+				  updateStatus,
+				  debugLevel);
+		  TileProcessor.CLTPass3d extended_pass = tp.combinePasses(
+				  tp.clt_3d_passes,        // final ArrayList <CLTPass3d> passes,
+				  bg_pass,                 // final int                   firstPass,
+				  tp.clt_3d_passes.size(), // final int                   lastPassPlus1,
+				  false,                   // skip_combo, // do not process other combo scans
+				  true,                    // final boolean               use_last,   // use last scan data if nothing better
+				  true,                    // not calculated yet! true, // final boolean useCombo, // use combined disparity/strength (false - use measured full correlation
+				  false,                   // final boolean               usePoly,  // use polynomial method to find max), valid if useCombo == false
+				  clt_parameters.combine_min_strength, // final double    minStrength, // ignore too weak tiles
+				  true);                   // clt_parameters.show_combined);
+		  tp.setupExtendDisparity(
+				  extended_pass,                     // final CLTPass3d   scan,      // combined scan with max_tried_disparity, will be modified to re-scan
+				  tp.clt_3d_passes.get(refine_pass), // final CLTPass3d   last_scan, // last prepared tile - can use last_scan.disparity, .border_tiles and .selected
+				  tp.clt_3d_passes.get(bg_pass),     // final CLTPass3d   bg_scan,   // background scan data
+				  clt_parameters.grow_sweep,         // 8; // Try these number of tiles around known ones
+				  clt_parameters.grow_disp_max,      // = 50.0; // Maximal disparity to try
+				  clt_parameters.grow_disp_trust,    // = 4.0;  // Trust measured disparity within +/- this value
+				  clt_parameters.grow_disp_step,     // = 6.0;  // Increase disparity (from maximal tried) if nothing found in that tile // TODO: handle enclosed dips?
+				  clt_parameters.grow_min_diff,      // = 0.5;  // Grow more only if at least one channel has higher variance from others for the tile
+				  clt_parameters,                    // EyesisCorrectionParameters.CLTParameters clt_parameters,
+				  geometryCorrection,                // GeometryCorrection geometryCorrection,
+				  true,                              // final boolean     show_debug,
+				  threadsMax,                        // maximal number of threads to launch
+				  updateStatus,
+				  debugLevel);
+		  refine_pass = tp.clt_3d_passes.size(); // 1
+		  tp.clt_3d_passes.add(extended_pass);
+		  int [] numLeftRemoved = tp.makeUnique (
+				  tp.clt_3d_passes,                  // final ArrayList <CLTPass3d> passes,
+				  0,                                 // final int                   firstPass,
+				  refine_pass - 1,                   // final int                   lastPassPlus1,
+				  tp.clt_3d_passes.get(refine_pass), // final CLTPass3d             new_scan,
+				  clt_parameters.unique_tolerance,   // final double                unique_tolerance,
+				  clt_parameters.show_unique);       // final boolean               show_unique)
+		  if (debugLevel > -1){
+			  System.out.println("last makeUnique("+refine_pass+") -> left: "+numLeftRemoved[0]+", removed:"+numLeftRemoved[1]);
+		  }
+		  // refine_pass = tp.clt_3d_passes.size(); //
 		  CLTMeasure( // perform single pass according to prepared tiles operations and disparity
 				  image_data, // first index - number of image in a quad
 				  clt_parameters,
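Each refine cycle now prunes duplicate measurements before calling CLTMeasure: makeUnique drops tiles whose new target disparity is within unique_tolerance of something already measured in an earlier pass, and reports how many tiles were kept and removed. A simplified, self-contained illustration of that idea (this is not the actual TileProcessor.makeUnique, which operates on CLTPass3d scans; the helper below is hypothetical):

    // Returns a mask of tiles that still need measurement: a tile is skipped when its
    // new target disparity is within uniqueTolerance of a disparity already measured for it.
    static boolean[] needsMeasurement(double[] newTargets, double[][] measuredPerTile, double uniqueTolerance) {
        boolean[] measure = new boolean[newTargets.length];
        for (int tile = 0; tile < newTargets.length; tile++) {
            measure[tile] = true;
            for (double old : measuredPerTile[tile]) {
                if (Math.abs(newTargets[tile] - old) <= uniqueTolerance) {
                    measure[tile] = false; // close enough to an earlier measurement
                    break;
                }
            }
        }
        return measure;
    }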
@@ -4776,20 +4878,74 @@ public class QuadCLT {
 				  threadsMax,  // maximal number of threads to launch
 				  updateStatus,
 				  debugLevel);
+		  if (debugLevel > -1){
+			  System.out.println("extending: CLTMeasure("+refine_pass+")");
+		  }
+		  if (clt_parameters.combine_refine){
+			  // TileProcessor.CLTPass3d scan = tp.clt_3d_passes.get(scanIndex);
+			  TileProcessor.CLTPass3d combo_pass = tp.combinePasses(
+					  tp.clt_3d_passes,        // final ArrayList <CLTPass3d> passes,
+					  bg_pass,                 // final int                   firstPass,
+					  tp.clt_3d_passes.size(), // final int                   lastPassPlus1,
+					  true,                    // skip_combo, // do not process other combo scans
+					  true,                    // final boolean               use_last,   // use last scan data if nothing better
+					  false,                   // not calculated yet! true, // final boolean useCombo, // use combined disparity/strength (false - use measured full correlation
+					  false,                   // final boolean               usePoly,  // use polynomial method to find max), valid if useCombo == false
+					  clt_parameters.combine_min_strength, // final double    minStrength, // ignore too weak tiles
+					  clt_parameters.show_combined);
+			  tp.clt_3d_passes.add(combo_pass);
+			  // refine_pass = tp.clt_3d_passes.size(); //
+		  }
+		  refine_pass = tp.clt_3d_passes.size(); // 1
+		  // Refine after extension
+		  tp.refinePassSetup( // prepare tile tasks for the refine pass (re-measure disparities)
+				  //  final double [][][]       image_data, // first index - number of image in a quad
+				  clt_parameters,
+				  clt_parameters.stUseRefine, // use supertiles
+				  bg_pass,
+				  // disparity range - differences from
+				  clt_parameters.bgnd_range,    // double            disparity_far,
+				  clt_parameters.grow_disp_max, // other_range, //double            disparity_near, //
+				  clt_parameters.bgnd_sure,     // double            this_sure,        // minimal strength to be considered definitely background
+				  clt_parameters.bgnd_maybe,    // double            this_maybe,       // maximal strength to ignore as non-background
+				  clt_parameters.sure_smth,     // sure_smth,        // if 2-nd worst image difference (noise-normalized) exceeds this - do not propagate bgnd
+				  ImageDtt.DISPARITY_INDEX_CM,  // index of disparity value in disparity_map == 2 (0,2 or 4)
+				  geometryCorrection,
+				  threadsMax,  // maximal number of threads to launch
+				  updateStatus,
+				  2); // debugLevel);
+		  numLeftRemoved = tp.makeUnique (
+				  tp.clt_3d_passes,                  // final ArrayList <CLTPass3d> passes,
+				  0,                                 // final int                   firstPass,
+				  refine_pass - 1,                   // final int                   lastPassPlus1,
+				  tp.clt_3d_passes.get(refine_pass), // final CLTPass3d             new_scan,
+				  clt_parameters.unique_tolerance,   // final double                unique_tolerance,
+				  clt_parameters.show_unique);       // final boolean               show_unique)
+		  if (debugLevel > -1){
+			  System.out.println("makeUnique("+refine_pass+") -> left: "+numLeftRemoved[0]+", removed:"+numLeftRemoved[1]);
+		  }
+		  }
+		  // TEMPORARY EXIT
+		  if (tp.clt_3d_passes.size() > 0) return null; // just to fool compiler
 		  // testing 2-nd pass
 		  int next_pass = tp.clt_3d_passes.size(); // 2
 		  tp.secondPassSetup( // prepare tile tasks for the second pass based on the previous one(s)
 				  //  final double [][][]       image_data, // first index - number of image in a quad
 				  clt_parameters,
+				  clt_parameters.stUsePass2, // use supertiles
 				  bg_pass,
 				  // disparity range - differences from
 				  clt_parameters.bgnd_range,    // double            disparity_far,
-				  clt_parameters.other_range,   //double            disparity_near, //
+				  clt_parameters.grow_disp_max, // other_range, //double            disparity_near, //
 				  clt_parameters.bgnd_sure,     // double            this_sure,        // minimal strength to be considered definitely background
 				  clt_parameters.bgnd_maybe,    // double            this_maybe,       // maximal strength to ignore as non-background
 				  clt_parameters.sure_smth,     // sure_smth,        // if 2-nd worst image difference (noise-normalized) exceeds this - do not propagate bgnd
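setupExtendDisparity, called in the previous hunk with clt_parameters.grow_sweep, marks new tiles to probe around tiles that already carry measurements; grow_sweep is how many tiles outward to reach. A minimal sketch of that neighborhood expansion as a plain mask dilation on the tile grid (an illustration of the parameter's documented meaning, not the actual setupExtendDisparity; growSelection is a hypothetical helper):

    // Expand a mask of known tiles by growSweep steps in 4-neighbor connectivity and
    // return the tiles that were added, i.e. the ones that should get new targets.
    static boolean[] growSelection(boolean[] known, int tilesX, int tilesY, int growSweep) {
        boolean[] current = known.clone();
        for (int step = 0; step < growSweep; step++) {
            boolean[] next = current.clone();
            for (int ty = 0; ty < tilesY; ty++) {
                for (int tx = 0; tx < tilesX; tx++) {
                    int nt = ty * tilesX + tx;
                    if (current[nt]) continue;
                    if ((tx > 0          && current[nt - 1]) ||
                        (tx < tilesX - 1 && current[nt + 1]) ||
                        (ty > 0          && current[nt - tilesX]) ||
                        (ty < tilesY - 1 && current[nt + tilesX])) {
                        next[nt] = true; // reachable from a known tile in one more step
                    }
                }
            }
            current = next;
        }
        boolean[] added = new boolean[known.length];
        for (int i = 0; i < known.length; i++) added[i] = current[i] && !known[i];
        return added;
    }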
@@ -4977,6 +5133,7 @@ public class QuadCLT {
 				  clt_parameters.min_clstr_seed,  // number of tiles in a cluster to seed (just background?)
 				  clt_parameters.min_clstr_block, // number of tiles in a cluster to block (just non-background?)
 				  disparity_index,                // index of disparity value in disparity_map == 2 (0,2 or 4)
+				  clt_parameters.show_bgnd_nonbgnd,
 				  (clt_parameters.debug_filters ? debugLevel : -1));
 		  boolean [] bgnd_strict = bgnd_tiles.clone(); // only these have non 0 alpha
 		  tp.growTiles(
src/main/java/TileProcessor.java

@@ -65,13 +65,15 @@ public class TileProcessor {
   public class CLTPass3d{
 		public  double [][]     disparity; // per-tile disparity set for the pass[tileY][tileX]
 		public  int    [][]     tile_op;   // what was done in the current pass
-		public  double [][]     disparity_map;        // add 4 layers - worst difference for the port
+		public  double [][]     disparity_map = null; // add 4 layers - worst difference for the port
 		private double []       calc_disparity = null;       // composite disparity, calculated from "disparity", and "disparity_map" fields
 		                                                      // using horizontal features and corr_magic_scale
 		private double []       calc_disparity_hor =   null; // composite disparity, calculated from "disparity", and "disparity_map" fields
 		private double []       calc_disparity_vert =  null; // composite disparity, calculated from "disparity", and "disparity_map" fields
 		private double []       calc_disparity_combo = null; // composite disparity, calculated from "disparity", and "disparity_map" fields
 		private double []       strength =             null; // composite strength, initially uses a copy of raw 4-sensor correleation strength
+		private double []       strength_hor =         null; // updated hor strength, initially uses a copy of raw measured
+		private double []       strength_vert =        null; // updated hor strength, initially uses a copy of raw measured
 		// Bg disparity & strength is calculated from the supertiles and used instead of the tile disparity if it is too weak. Assuming, that
 		// foreground features should have good correlation details, and if the tile does not nhave them it likely belongs to the background.
 		// calculate disparity and strength from the (lapped) supertiles, using lowest allowed (>= minBgDisparity) disparity histogram maximums
@@ -81,16 +83,16 @@ public class TileProcessor {
 		// exceeds minBgFract, otherwise proceed to the next one (and accumulate strength)
 		private double []       bgTileDisparity = null;
 		private double []       bgTileStrength =  null;
-		public  boolean []      border_tiles;        // these are border tiles, zero out alpha
-		public  boolean []      selected;            // which tiles are selected for this layer
+		public  boolean []      border_tiles = null; // these are border tiles, zero out alpha
+		public  boolean []      selected = null;     // which tiles are selected for this layer
 		public  double [][][][] texture_tiles;
+		public  double [][]     max_tried_disparity = null; //[ty][tx] used for combined passes, shows maximal disparity wor this tile, regardless of results
+		public  boolean         is_combo = false;
 		public  String          texture = null; // relative (to x3d) path
 		public  Rectangle       bounds;
 		public  int             dbg_index;
 		public  int             disparity_index = ImageDtt.DISPARITY_INDEX_CM; // may also be ImageDtt.DISPARITY_INDEX_POLY
 		SuperTiles              superTiles = null;
 		public void updateSelection(){ // add updating border tiles?
@@ -110,6 +112,18 @@ public class TileProcessor {
 			bounds = new Rectangle(minX, minY, maxX - minX + 1, maxY - minY + 1);
 		}
+		public boolean isProcessed(){
+			return calc_disparity != null;
+		}
+		public boolean isMeasured(){
+			return disparity_map != null;
+		}
+		public boolean isCombo(){
+			return is_combo;
+		}
 		/**
 		 * Get FPGA-calculated per-tile maximal differences between the particular image and the average one.
 		 * @return per-camera sesnor array of line-scan differences
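The three new predicates give later code a uniform way to ask what stage a scan is in: isMeasured() means raw correlation results (disparity_map) exist, isProcessed() means the composite calc_disparity has been derived from them, and isCombo() marks scans produced by combining rather than by measurement. A small, self-contained sketch of filtering a pass list on these flags (the class and method names below are illustrative stand-ins; combinePasses later in this file applies its own, more detailed conditions):

    import java.util.ArrayList;
    import java.util.List;

    class ScanFilterSketch {
        // Minimal stand-in for the CLTPass3d state flags.
        static class Scan {
            double[][] disparity_map;  // set after measurement
            double[]   calc_disparity; // set after processing
            boolean    is_combo;       // set when produced by combining
            boolean isMeasured()  { return disparity_map  != null; }
            boolean isProcessed() { return calc_disparity != null; }
            boolean isCombo()     { return is_combo; }
        }

        // Keep scans that actually carry usable data, optionally skipping combined ones.
        static List<Scan> usable(List<Scan> passes, boolean skipCombo, boolean needProcessed) {
            List<Scan> out = new ArrayList<>();
            for (Scan s : passes) {
                if (skipCombo && s.isCombo()) continue;
                if (needProcessed ? s.isProcessed() : s.isMeasured()) out.add(s);
            }
            return out;
        }
    }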
@@ -124,6 +138,8 @@ public class TileProcessor {
 		public void resetCalc(){ // only needed if the same task was reused
 			calc_disparity = null;
 			strength =       null;
+			strength_hor =   null;
+			strength_vert =  null;
 			superTiles =     null;
 		}
@@ -135,7 +151,7 @@ public class TileProcessor {
 				boolean combineHor,
 				boolean combineVert)
 		{
-			getStrength();
+			getStrength(); // clone if not done yet
 			if (combineHor){
 				double [] hstrength = getHorStrength();
 				for (int i = 0; i < strength.length; i++) {
@@ -190,14 +206,20 @@ public class TileProcessor {
 		 * @return line-scan array of per-tile horizontal pairs correlation strength by reference (not a copy)
 		 */
 		public double [] getHorStrength(){
-			return disparity_map[ImageDtt.DISPARITY_INDEX_HOR_STRENGTH];
+			if (strength_hor == null) {
+				strength_hor = disparity_map[ImageDtt.DISPARITY_INDEX_HOR_STRENGTH].clone();
+			}
+			return strength_hor;
 		}
 		/**
 		 * Get veriical pairs correlation strength for horizontal features. Not a copy
 		 * @return line-scan array of per-tile horizontal pairs correlation strength by reference (not a copy)
 		 */
 		public double [] getVertStrength(){
-			return disparity_map[ImageDtt.DISPARITY_INDEX_VERT_STRENGTH];
+			if (strength_vert == null) {
+				strength_vert = disparity_map[ImageDtt.DISPARITY_INDEX_VERT_STRENGTH].clone();
+			}
+			return strength_vert;
 		}
 		/**
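getHorStrength()/getVertStrength() now hand out a cached, cloned copy of the measured strength layer instead of the raw disparity_map row, so later per-tile adjustments no longer mutate the measurement itself; resetCalc() above clears the caches when a scan is reused. A generic sketch of that clone-on-first-access pattern (field and class names are illustrative):

    class LazyCopySketch {
        private double[] measured = {0.2, 0.5, 0.1}; // stands in for a disparity_map layer
        private double[] cached = null;              // stands in for strength_hor / strength_vert

        // First call clones the measured data; later calls return the same mutable copy.
        double[] getStrengthCopy() {
            if (cached == null) {
                cached = measured.clone();
            }
            return cached;
        }

        // Equivalent of resetCalc(): forget the derived copy so it is rebuilt next time.
        void reset() {
            cached = null;
        }
    }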
@@ -267,6 +289,8 @@ public class TileProcessor {
 				final boolean []  selection,
 				final double      weakStrength,    // strength to be considered weak, subject to this replacement
 				final double      maxDiff,
+				final double      maxDiffPos,      // Replace weak outlayer tiles that have higher disparity than weighted average
+				final double      maxDiffNeg,      // Replace weak outlayer tiles that have lower disparity than weighted average
 				final double      disparityFar,
 				final double      disparityNear)
 		{
@@ -278,7 +302,7 @@ public class TileProcessor {
 			final double  [] strength =        getStrength();
 			final double     absMinDisparity = 0.5 * disparityFar;  // adjust? below this is definitely wrong (weak)
 			final double     absMaxDisparity = 1.5 * disparityNear; // change?
-			final int        dbg_nTile = 46462; // 41545;
+			final int        dbg_nTile = 42228; // x = 108, y = 130  46462; // 41545;
 			final Thread[]   threads = ImageDtt.newThreadArray(threadsMax);
 			// first pass = find outlayers
 			final AtomicInteger ai = new AtomicInteger(0);
@@ -302,19 +326,28 @@ public class TileProcessor {
 						if ((tileY > 0) && (tileY < (tilesY - 1)) &&(tileX > 0) && (tileX < (tilesX - 1))){ // disregard outer row/cols
 							weakOutlayers[nTile] = true;
 							boolean hasNeighbors = false;
+							double sd = 0.0, sw = 0.0;
 							for (int dir = 0; dir < dirs.length; dir++){
 								int nTile1 = nTile + dirs[dir];
 								double dbg_disparity_nTile1 = disparity[nTile1];
 								if (((selection == null) || selection[nTile1]) &&
 										(disparity[nTile1] >= disparityFar) && // don't count on too near/too far for averaging
 										(disparity[nTile1] <= disparityNear)){
+									double w = strength[nTile1];
+									sw += w;
+									sd += w * disparity[nTile1];
 									hasNeighbors = true;
 									if (Math.abs(disparity[nTile]-disparity[nTile1]) <= maxDiff){ // any outlayer - will be false
 										weakOutlayers[nTile] = false;
-										break;
+										// break;
 									}
 								}
 							}
+							if (sw >= 0.0) {
+								sd /= sw;
+								if      (disparity[nTile] < (sd - maxDiffNeg)) weakOutlayers[nTile] = true;
+								else if (disparity[nTile] > (sd + maxDiffPos)) weakOutlayers[nTile] = true;
+							}
 							if (disparity[nTile] < disparityFar)  weakOutlayers[nTile] = true;
 							if (disparity[nTile] > disparityNear) weakOutlayers[nTile] = true;
 							if (!hasNeighbors) {
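The reworked weak-outlayer test no longer stops at the first neighbor within maxDiff; it also accumulates a strength-weighted average of the neighbor disparities and flags the tile when it sits more than maxDiffPos above or maxDiffNeg below that average. An isolated sketch of that test for one tile (the real code runs inside the threaded per-tile loop above with extra bookkeeping; the sw > 0 guard in this sketch is an assumption to avoid dividing by zero when all neighbor strengths are zero):

    // Decide whether a single tile is a weak outlayer relative to its neighbors,
    // using a strength-weighted average of the neighbor disparities.
    static boolean isWeakOutlayer(double tileDisparity, double[] neighborDisparity, double[] neighborStrength,
                                  double maxDiff, double maxDiffPos, double maxDiffNeg) {
        boolean outlayer = true;
        double sd = 0.0, sw = 0.0;
        for (int i = 0; i < neighborDisparity.length; i++) {
            double w = neighborStrength[i];
            sw += w;
            sd += w * neighborDisparity[i];
            if (Math.abs(tileDisparity - neighborDisparity[i]) <= maxDiff) {
                outlayer = false; // some neighbor agrees with this tile
            }
        }
        if (sw > 0.0) { // assumption: skip the average test when there is no usable weight
            double avg = sd / sw;
            if (tileDisparity < (avg - maxDiffNeg)) outlayer = true;
            if (tileDisparity > (avg + maxDiffPos)) outlayer = true;
        }
        return outlayer;
    }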
@@ -1115,6 +1148,362 @@ public class TileProcessor {
...
@@ -1115,6 +1148,362 @@ public class TileProcessor {
clt_3d_passes
=
new
ArrayList
<
CLTPass3d
>();
clt_3d_passes
=
new
ArrayList
<
CLTPass3d
>();
}
}
/**
* Basic combining: find smallest residual disparity and use the tile data from it
* Copy link to texture tile from the same pass, "forced" bit in tile_op is copied too
* Even when this method compares calculated values, it still only copies raw ones, all derivatives should
* be re-calculated for the new combined pass
*
* Calculates max_tried_disparity that shows maximal tried dieparity for each tile, regardless of the reulsts/strength
* @param passes list of passes to merge
* @param firstPass first index in the list to use
* @param lastPass last index in the list to use
* @param debugLevel debug level
* @return combined pass, contains same data as after the measuremnt of the actual one
*/
	public CLTPass3d combinePasses(
			final ArrayList <CLTPass3d> passes,
			final int                   firstPass,
			final int                   lastPassPlus1,
			final boolean               skip_combo,    // do not process other combo scans
			final boolean               use_last,      // use last scan data if nothing better
			final boolean               useCombo,      // use combined disparity/strength (false - use measured full correlation
			                                           // TODO: when useCombo - pay attention to borders (disregard)
			final boolean               usePoly,       // use polynomial method to find max), valid if useCombo == false
			final double                minStrength,   // ignore too weak tiles
			final boolean               show_combined)
	{
		CLTPass3d combo_pass = new CLTPass3d();
		final int tlen = tilesX * tilesY;
		combo_pass.disparity =           new double [tilesY][tilesX];
		combo_pass.tile_op =             new int [tilesY][tilesX];
		combo_pass.disparity_map =       new double [ImageDtt.DISPARITY_TITLES.length][tlen];
		combo_pass.texture_tiles =       new double [tilesY][tilesX][][];
		combo_pass.max_tried_disparity = new double [tilesY][tilesX];
		combo_pass.is_combo =            true;
		showDoubleFloatArrays sdfa_instance = null;
		String [] titles = null;
		int dbg_tile = -1; // 27669;
		double [][] dbg_data = null;
		if (show_combined) {
			sdfa_instance = new showDoubleFloatArrays(); // just for debugging?
			int numScans = lastPassPlus1 - firstPass;
			titles = new String [3 * (numScans + 1) + 1];
			dbg_data = new double [titles.length][tlen];
			for (int i = 0; i < numScans; i++) {
				CLTPass3d dbg_pass = passes.get(firstPass + i);
				if (dbg_pass.disparity_map != null) {
					for (int ty = 0; ty < tilesY; ty++) for (int tx = 0; tx < tilesX; tx++){
						int nt = ty * tilesX + tx;
						dbg_data[i][nt] =                      dbg_pass.disparity[ty][tx];
						dbg_data[i + 1 * (numScans + 1)][nt] = dbg_pass.disparity_map[ImageDtt.DISPARITY_INDEX_CM][nt];
						dbg_data[i + 2 * (numScans + 1)][nt] = dbg_pass.disparity_map[ImageDtt.DISPARITY_STRENGTH_INDEX][nt];
					}
					titles[i] =                      "disparity_" + i;
					titles[i + 1 * (numScans + 1)] = "cm_disparity_" + i;
					titles[i + 2 * (numScans + 1)] = "strength_" + i;
				}
			}
		}
		for (int ty = 0; ty < tilesY; ty++) for (int tx = 0; tx < tilesX; tx++) combo_pass.texture_tiles[ty][tx] = null;
		for (int ty = 0; ty < tilesY; ty++) {
			for (int tx = 0; tx < tilesX; tx++){
				int nt = ty * tilesX + tx;
				int best_index = -1;
				int best_weak_index = -1;
				double adiff_best =      Double.NaN;
				double adiff_best_weak = Double.NaN;
				combo_pass.max_tried_disparity[ty][tx] = 0.0;
				if (useCombo && (nt == dbg_tile)){
					System.out.println("combinePasses(): nt = " + nt + ", tx = " + tx + ", ty = " + ty + ", useCombo = " + useCombo);
				}
				for (int ipass = firstPass; ipass < lastPassPlus1; ipass++){
					CLTPass3d pass = passes.get(ipass);
					if (useCombo && (nt == dbg_tile)) {
						System.out.println("combinePasses(): ipass = " + ipass + " nt = " + nt +
								" pass.tile_op[" + ty + "][" + tx + "]=" + pass.tile_op[ty][tx] +
								" pass.isCombo()=" + (pass.isCombo()) + " pass.isProcessed()=" + (pass.isProcessed()));
					}
					if ((pass.tile_op[ty][tx] != 0) && (useCombo ? pass.isProcessed() : pass.isMeasured()) && !(skip_combo && pass.isCombo())){
						if (pass.disparity[ty][tx] > combo_pass.max_tried_disparity[ty][tx]) combo_pass.max_tried_disparity[ty][tx] = pass.disparity[ty][tx];
						if (!useCombo || pass.isProcessed()) {
							double adiff, strength;
							if (useCombo && (nt == dbg_tile)){
								System.out.println("combinePasses(): pass.calc_disparity[" + nt + "]=" + pass.calc_disparity[nt] +
										" pass.disparity[" + ty + "][" + tx + "] = " + pass.disparity[ty][tx]);
							}
							if (useCombo) { // compare difference between preset disparity and the combined one
								adiff =    Math.abs(pass.calc_disparity[nt] - pass.disparity[ty][tx]);
								strength = pass.strength[nt];
							} else if (usePoly) { // just an amplitude of the polynomial maximum calculated disparity
								adiff =    Math.abs(pass.disparity_map[ImageDtt.DISPARITY_INDEX_POLY][nt]); // polynomial method
								strength = Math.abs(pass.disparity_map[ImageDtt.DISPARITY_STRENGTH_INDEX][nt]);
							} else { // just an amplitude of center of mass calculated disparity
								adiff =    Math.abs(pass.disparity_map[ImageDtt.DISPARITY_INDEX_CM][nt]); // center mass method
								strength = Math.abs(pass.disparity_map[ImageDtt.DISPARITY_STRENGTH_INDEX][nt]);
							}
							if ((strength > 0.0) && !Double.isNaN(adiff) && ((best_weak_index < 0) || (adiff < adiff_best_weak))) {
								best_weak_index = ipass;
								adiff_best_weak = adiff;
							}
							if ((strength > minStrength) && !Double.isNaN(adiff) && ((best_index < 0) || (adiff < adiff_best))) {
								best_index = ipass;
								adiff_best = adiff;
							}
							if (useCombo && (nt == dbg_tile)){
								System.out.println("combinePasses(): strength=" + strength + " best_weak_index=" + best_weak_index +
										" best_index=" + best_index + " adiff_best=" + adiff_best + " ipass=" + ipass + "adiff=" + adiff);
							}
						}
					}
				}
				if (use_last && (best_index < 0)) {
					CLTPass3d pass = passes.get(lastPassPlus1 - 1);
					if (pass.tile_op[ty][tx] != 0) best_index = lastPassPlus1 - 1;
					else if (best_weak_index >= 0) best_index = best_weak_index;
				}
				if (best_index >= 0){
					CLTPass3d pass = passes.get(best_index);
					combo_pass.tile_op[ty][tx] =   pass.tile_op[ty][tx];
					combo_pass.disparity[ty][tx] = pass.disparity[ty][tx];
					if ((pass.texture_tiles == null) || (combo_pass.texture_tiles == null)) {
						if ((ty == 0) && (tx == 0)) {
							System.out.println("BUG: best_index=" + best_index);
						}
					} else {
						combo_pass.texture_tiles[ty][tx] = pass.texture_tiles[ty][tx];
						for (int i = 0; i < ImageDtt.DISPARITY_TITLES.length; i++){
							combo_pass.disparity_map[i][nt] = pass.disparity_map[i][nt];
						}
					}
					// do not copy any of the calculated values - they should be re-calculated
				}
			}
		}
		if (show_combined) {
			int numScans = lastPassPlus1 - firstPass;
			for (int ty = 0; ty < tilesY; ty++) for (int tx = 0; tx < tilesX; tx++){
				int nt = ty * tilesX + tx;
				dbg_data[numScans][nt] =                      combo_pass.disparity[ty][tx];
				dbg_data[numScans + 1 * (numScans + 1)][nt] = combo_pass.disparity_map[ImageDtt.DISPARITY_INDEX_CM][nt];
				dbg_data[numScans + 2 * (numScans + 1)][nt] = combo_pass.disparity_map[ImageDtt.DISPARITY_STRENGTH_INDEX][nt];
				dbg_data[3 * (numScans + 1)][nt] =            combo_pass.max_tried_disparity[ty][tx];
			}
			titles[numScans] =                      "disparity_combo";
			titles[numScans + 1 * (numScans + 1)] = "cm_disparity_combo";
			titles[numScans + 2 * (numScans + 1)] = "strength_combo";
			titles[3 * (numScans + 1)] =            "max_tried_disparity";
			sdfa_instance.showArrays(dbg_data, tilesX, tilesY, true, "combo_scan_" + lastPassPlus1, titles);
		}
		return combo_pass;
	}
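The per-tile rule above — take the pass with the smallest absolute disparity residual whose strength clears minStrength, keeping the best weaker candidate as a fallback — can be summarized by the following stand-alone sketch (pickBestPass() is a hypothetical illustration, not a method of this commit):

	// Illustrative sketch only: choose the best pass index for one tile from per-pass
	// residuals and strengths, mirroring the selection loop in combinePasses() above.
	static int pickBestPass(double [] adiff, double [] strength, double minStrength) {
		int best = -1, bestWeak = -1;
		for (int ipass = 0; ipass < adiff.length; ipass++) {
			if (Double.isNaN(adiff[ipass]) || (strength[ipass] <= 0.0)) continue;
			if ((bestWeak < 0) || (adiff[ipass] < adiff[bestWeak])) bestWeak = ipass; // any non-zero strength qualifies as a weak candidate
			if ((strength[ipass] > minStrength) && ((best < 0) || (adiff[ipass] < adiff[best]))) best = ipass; // strong enough candidate
		}
		return (best >= 0) ? best : bestWeak; // combinePasses() only falls back to the weak candidate when use_last is set
	}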
	public int [] makeUnique(
			final ArrayList <CLTPass3d> passes,
			final int                   firstPass,
			final int                   lastPassPlus1,
			final CLTPass3d             new_scan,
			final double                unique_tolerance,
			final boolean               show_unique)
	{
		int [][] dbg_tile_op = null;
		if (show_unique){
			dbg_tile_op = new int [tilesY][];
			for (int ty = 0; ty < tilesY; ty++){
				dbg_tile_op[ty] = new_scan.tile_op[ty].clone();
			}
		}
		int removed = 0, total = 0;
		for (int ty = 0; ty < tilesY; ty++) {
			for (int tx = 0; tx < tilesX; tx++){
				if (new_scan.tile_op[ty][tx] != 0){
					total++;
					for (int ipass = firstPass; ipass < lastPassPlus1; ipass++){
						CLTPass3d pass = passes.get(ipass);
						if (pass.tile_op[ty][tx] != 0){
							if (Math.abs(new_scan.disparity[ty][tx] - pass.disparity[ty][tx]) < unique_tolerance){
								new_scan.tile_op[ty][tx] = 0;
								removed++;
								break;
							}
						}
					}
				}
			}
		}
		if (show_unique){
			showDoubleFloatArrays sdfa_instance = new showDoubleFloatArrays();
			double [] dbg_data = new double [tilesY * tilesX];
			for (int ty = 0; ty < tilesY; ty++) for (int tx = 0; tx < tilesX; tx++){
				dbg_data[ty * tilesX + tx] = (dbg_tile_op[ty][tx] == 0) ? 0 : ((new_scan.tile_op[ty][tx] == 0.0) ? 1.0 : 2.0);
			}
			sdfa_instance.showArrays(dbg_data, tilesX, tilesY, "unique_scan_" + lastPassPlus1);
		}
		int [] rslt = {total - removed, removed};
		return rslt;
	}
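A hedged usage sketch of makeUnique() (the list name clt_3d_passes exists in this class; the scan variable and the 0.1 pix tolerance are assumed values for illustration):

	// Illustrative only: drop tiles of a newly prepared scan whose requested disparity was
	// already tried within unique_tolerance by any earlier pass, then check what remains.
	int [] kept_removed = makeUnique(
			clt_3d_passes,        // all previous passes
			0,                    // firstPass
			clt_3d_passes.size(), // lastPassPlus1
			scan_next,            // the scan being prepared (its tile_op is cleared in place)
			0.1,                  // unique_tolerance, pixels (assumed value)
			false);               // show_unique
	boolean worth_measuring = kept_removed[0] > 0; // [0] = tiles left, [1] = tiles removed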
	public void setupExtendDisparity(
			final CLTPass3d   scan,            // combined scan with max_tried_disparity, will be modified to re-scan
			final CLTPass3d   last_scan,       // last prepared tile - can use last_scan.disparity, .border_tiles and .selected
			final CLTPass3d   bg_scan,         // background scan data
			final int         grow_sweep,      // 8; // Try these number of tiles around known ones
			final double      grow_disp_max,   // = 50.0; // Maximal disparity to try
			final double      grow_disp_trust, // = 4.0;  // Trust measured disparity within +/- this value
			final double      grow_disp_step,  // = 6.0;  // Increase disparity (from maximal tried) if nothing found in that tile // TODO: handle enclosed dips?
			final double      grow_min_diff,   // = 0.5;  // Grow more only if at least one channel has higher variance from others for the tile
			EyesisCorrectionParameters.CLTParameters clt_parameters,
			GeometryCorrection geometryCorrection,
			final boolean     show_debug,
			final int         threadsMax,      // maximal number of threads to launch
			final boolean     updateStatus,
			final int         debugLevel)
	{
		final int tlen = tilesY * tilesX;
		double [][] dbg_img = null;
		String [] dbg_titles = null;
		showDoubleFloatArrays sdfa_instance = null;
		DisparityProcessor dp = new DisparityProcessor(this, clt_parameters.transform_size * geometryCorrection.getScaleDzDx());
		double [] disparity = scan.getDisparity(); // to modify in-place
		boolean [] these_no_border = new boolean [tlen];
		for (int i = 0; i < these_no_border.length; i++) {
			these_no_border[i] = last_scan.selected[i] && !last_scan.border_tiles[i];
		}
		boolean [] known_tiles = these_no_border.clone(); // known are background or these tiles
		for (int i = 0; i < known_tiles.length; i++) {
			known_tiles[i] |= bg_scan.selected[i];
		}
		// set combo disparity from last prepared
		for (int nt = 0; nt < known_tiles.length; nt++){
			int ty = nt / tilesX;
			int tx = nt % tilesX;
			disparity[nt] = 0.0;
///			if (last_scan.selected[nt]) disparity[nt] = last_scan.disparity[ty][tx];
			if (these_no_border[nt]) disparity[nt] = last_scan.disparity[ty][tx];
		}
		boolean [] grown = known_tiles.clone();
		growTiles(
				2 * grow_sweep, // grow tile selection by 1 over non-background tiles 1: 4 directions, 2 - 8 directions, 3 - 8 by 1, 4 by 1 more
				grown,          // boolean [] tiles,
				null);          // boolean [] prohibit)
		boolean [] border = grown.clone();
		for (int i = 0; i < border.length; i++) border[i] &= !known_tiles[i];
		int [] neighbors = dp.getNeighbors( // creates neighbors mask from bitmask
				grown,  // these_tiles, // grown, // these_tiles, // boolean [] selected,
				tilesX);
/*
		if (clt_parameters.show_neighbors) {
			double [] dbg_neib = dp.dbgShowNeighbors(
					grown, // these_tiles, // grown, // these_tiles,
					neighbors, // _orig, // int [] neighbors,
					clt_parameters.transform_size, // int tile_size,
					-1.0, // double bgnd,
					1.0); // double fgnd)
			sdfa_instance.showArrays(dbg_neib,tilesX*clt_parameters.transform_size, tilesY*clt_parameters.transform_size,"XXneighbors");
		}
*/
		dp.smoothDisparity(
				clt_parameters.tiDispPull,   // final double     dispPull, // clt_parameters.tiDispPull or 0.0
				2,                           // 2, // 3, // final int mask, // 1 - work on internal elements, 2 - on border elements, 3 - both (internal first);
				clt_parameters.tiIterations, // final int        num_passes,
				Math.pow(10.0, -clt_parameters.tiPrecision), // final double maxDiff, // maximal change in any of the disparity values
				neighbors,                   // final int     [] neighbors, // +1 - up (N), +2 - up-right - NE, ... +0x80 - NW
				scan.getDisparity(),         // final double  [] disparity,          // current disparity value
				scan.getDisparity().clone(), // final double  [] measured_disparity, // measured disparity
				scan.getStrength(),          // final double  [] strength,
				null,                        // this_hor_disparity, // final double hor_disparity, // not yet used
				null,                        // hor_strength_conv,  // final double hor_strength,  // not yet used
				known_tiles,                 // these_tiles, // grown, // these_tiles, // final boolean [] selected,
				border,                      // final boolean [] border,
				clt_parameters,
				threadsMax,                  // maximal number of threads to launch
				debugLevel);
		scan.selected =     grown;
		scan.border_tiles = border;
		scan.disparity = new double [tilesY][tilesX];
		scan.tile_op =   new int [tilesY][tilesX];
		int op = ImageDtt.setImgMask(0, 0xf);
		op =     ImageDtt.setPairMask(op, 0xf);
		op =     ImageDtt.setForcedDisparity(op, true);
		for (int ty = 0; ty < tilesY; ty++) for (int tx = 0; tx < tilesX; tx++){
			int indx = tilesX * ty + tx;
			if (scan.selected[indx]) {
				scan.disparity[ty][tx] = disparity[indx];
				scan.tile_op[ty][tx] = op;
			} else {
				scan.disparity[ty][tx] = 0.0;
				scan.tile_op[ty][tx] = 0;
			}
		}
		if (show_debug){
			String [] dbg_titles0 = {"tried", "disparity", "bgnd", "these", "known_in", "known"};
			dbg_titles = dbg_titles0;
			dbg_img = new double [dbg_titles.length][];
			dbg_img[0] = new double [tilesY * tilesX];
//			dbg_img[1] = scan.getDisparity();
			dbg_img[1] = new double [tilesY * tilesX];
			dbg_img[2] = new double [tilesY * tilesX];
			dbg_img[3] = new double [tilesY * tilesX];
			dbg_img[4] = new double [tilesY * tilesX];
			dbg_img[5] = new double [tilesY * tilesX];
			for (int i = 0; i < tlen; i++){
				int ty = i / tilesX;
				int tx = i % tilesX;
				dbg_img[0][i] = scan.max_tried_disparity[ty][tx];
				dbg_img[1][i] = scan.disparity[ty][tx];
//				dbg_img[1][i] = last_scan.disparity[ty][tx];
				dbg_img[2][i] = ((bg_scan.selected != null) && (bg_scan.selected[i])) ? 1.0 : 0.0;
				dbg_img[3][i] = ((last_scan.selected != null) && (last_scan.selected[i])) ? 1.0 : 0.0;
				dbg_img[4][i] = dbg_img[2][i] + dbg_img[3][i];
				dbg_img[5][i] = (scan.selected[i] ? 1 : 0) + (scan.border_tiles[i] ? 2 : 0);
			}
			sdfa_instance = new showDoubleFloatArrays(); // just for debugging?
			sdfa_instance.showArrays(dbg_img, tilesX, tilesY, true, "extend_disparity", dbg_titles);
		}
	}
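The selection bookkeeping in setupExtendDisparity() — known tiles are the background selection plus the non-border tiles of the last scan, the selection is grown around them, and the border is the newly added ring — in a minimal hedged sketch (the array names here are hypothetical stand-ins for the fields used above):

	// Illustrative only: how the grown selection and its border relate to the known tiles.
	boolean [] known = new boolean [tilesY * tilesX];
	for (int i = 0; i < known.length; i++) known[i] = bg_selected[i] || these_no_border[i];
	boolean [] grown = known.clone();
	growTiles(2 * grow_sweep, grown, null);                          // expand outward from the known area
	boolean [] border = grown.clone();
	for (int i = 0; i < border.length; i++) border[i] &= !known[i]; // keep only the newly added ring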
	public CLTPass3d prepareExtendDisparityScan(
			final CLTPass3d   base_scan,       // should be a combo, including non-null max_tried_disparity
			CLTPass3d         new_scan,        // either semi-prepared scan (i.e. refined disparities) or null
			final int         grow_sweep,      // double number of extra rows/columns to add
			final double      grow_disp_max,   // = 50.0; // Maximal disparity to try
			final double      grow_disp_trust, // = 4.0;  // Trust measured disparity within +/- this value
			final double      grow_disp_step,  // = 6.0;  // Increase disparity (from maximal tried) if nothing found in that tile // TODO: handle enclosed dips?
			final double      grow_min_diff,   // = 0.5;  // Grow more only if at least one channel has higher variance from others for the tile
			final int         debugLevel)
	{
		boolean is_new_scan = new_scan == null;
		final CLTPass3d scan = is_new_scan ? new CLTPass3d() : new_scan;
		return scan;
	}
//sure_smth
//sure_smth
public
boolean
[]
getBackgroundMask
(
// which tiles do belong to the background
public
boolean
[]
getBackgroundMask
(
// which tiles do belong to the background
double
bgnd_range
,
// disparity range to be considered background
double
bgnd_range
,
// disparity range to be considered background
...
@@ -1124,6 +1513,7 @@ public class TileProcessor {
...
@@ -1124,6 +1513,7 @@ public class TileProcessor {
int
min_clstr_seed
,
// number of tiles in a cluster to seed (just background?)
int
min_clstr_seed
,
// number of tiles in a cluster to seed (just background?)
int
min_clstr_block
,
// number of tiles in a cluster to block (just non-background?)
int
min_clstr_block
,
// number of tiles in a cluster to block (just non-background?)
int
disparity_index
,
// index of disparity value in disparity_map == 2 (0,2 or 4)
int
disparity_index
,
// index of disparity value in disparity_map == 2 (0,2 or 4)
boolean
show_bgnd_nonbgnd
,
int
debugLevel
int
debugLevel
){
){
boolean
[]
bgnd_tiles
=
new
boolean
[
tilesY
*
tilesX
];
boolean
[]
bgnd_tiles
=
new
boolean
[
tilesY
*
tilesX
];
...
@@ -1185,7 +1575,7 @@ public class TileProcessor {
...
@@ -1185,7 +1575,7 @@ public class TileProcessor {
0.0
);
// clt_parameters.min_clstr_max); // double min_max_weight // minimal value of the maximal strengh in the cluster
0.0
);
// clt_parameters.min_clstr_max); // double min_max_weight // minimal value of the maximal strengh in the cluster
}
}
if
(
sdfa_instance
!=
null
)
{
if
(
(
sdfa_instance
!=
null
)
&&
show_bgnd_nonbgnd
)
{
String
[]
titles
=
{
"bgnd"
,
"nonbgnd"
,
"block"
,
"strength"
,
"disparity"
};
String
[]
titles
=
{
"bgnd"
,
"nonbgnd"
,
"block"
,
"strength"
,
"disparity"
};
double
[][]
dbg_img
=
new
double
[
titles
.
length
][
tilesY
*
tilesX
];
double
[][]
dbg_img
=
new
double
[
titles
.
length
][
tilesY
*
tilesX
];
for
(
int
i
=
0
;
i
<
dbg_img
[
0
].
length
;
i
++){
for
(
int
i
=
0
;
i
<
dbg_img
[
0
].
length
;
i
++){
...
@@ -1286,6 +1676,7 @@ public class TileProcessor {
...
@@ -1286,6 +1676,7 @@ public class TileProcessor {
int
[][][]
overlap_clusters
,
// [cluster] {cd.internal, cd.border_fixed, cd.border_float}
int
[][][]
overlap_clusters
,
// [cluster] {cd.internal, cd.border_fixed, cd.border_float}
double
minDisparity
,
double
minDisparity
,
double
maxDisparity
,
double
maxDisparity
,
boolean
show_shells
,
int
debugLevel
)
int
debugLevel
)
{
{
final
int
maxrep
=
1000
;
final
int
maxrep
=
1000
;
...
@@ -1439,7 +1830,7 @@ public class TileProcessor {
...
@@ -1439,7 +1830,7 @@ public class TileProcessor {
if
(
debugLevel
>
-
1
)
{
if
(
debugLevel
>
-
1
)
{
System
.
out
.
println
(
"createTileOverlapTasks(): prepared "
+
numClusters
+
" clusters"
);
System
.
out
.
println
(
"createTileOverlapTasks(): prepared "
+
numClusters
+
" clusters"
);
}
}
if
(
debugLevel
>
-
1
)
{
if
(
(
debugLevel
>
-
1
)
&&
show_shells
)
{
double
[][]
dbg_shells
=
new
double
[
clt_3d_passes
.
size
()
-
startPass
][
tilesY
*
tilesX
+
1
];
double
[][]
dbg_shells
=
new
double
[
clt_3d_passes
.
size
()
-
startPass
][
tilesY
*
tilesX
+
1
];
double
ampl
=
dbg_shells
.
length
;
double
ampl
=
dbg_shells
.
length
;
for
(
int
ns
=
1
;
ns
<
dbg_shells
.
length
;
ns
++)
{
for
(
int
ns
=
1
;
ns
<
dbg_shells
.
length
;
ns
++)
{
...
@@ -1897,13 +2288,33 @@ public class TileProcessor {
...
@@ -1897,13 +2288,33 @@ public class TileProcessor {
}
}
}
}
public
void
removeLoneClusters
(
boolean
diag_en
,
// enable diagonal directions, false only up, dowm, right,left
boolean
[]
tiles_src
,
// selected tiles, will modified
double
[]
weights_src
,
// or null
int
min_area
,
// minimal number of pixels
double
min_weight
,
// minimal total weight of the cluster
double
min_max_weight
// minimal value of the maximal strengh in the cluster
){
removeLoneClusters
(
diag_en
,
// enable diagonal directions, false only up, dowm, right,left
tiles_src
,
// selected tiles, will modified
weights_src
,
// or null
min_area
,
// minimal number of pixels
min_weight
,
// minimal total weight of the cluster
min_max_weight
,
0
// minimal value of the maximal strengh in the cluster
);
}
public
void
removeLoneClusters
(
public
void
removeLoneClusters
(
boolean
diag_en
,
// enable diagonal directions, false only up, dowm, right,left
boolean
diag_en
,
// enable diagonal directions, false only up, dowm, right,left
boolean
[]
tiles
,
// selected tiles, will modified
boolean
[]
tiles
,
// selected tiles, will modified
double
[]
weights_src
,
// or null
double
[]
weights_src
,
// or null
int
min_area
,
// minimal number of pixels
int
min_area
,
// minimal number of pixels
double
min_weight
,
// minimal total weight of the cluster (expanded!
double
min_weight
,
// minimal total weight of the cluster (expanded!
double
min_max_weight
// minimal value of the maximal strengh in tghe cluster
double
min_max_weight
,
// minimal value of the maximal strengh in tghe cluster
int
debugLevel
){
){
boolean
[]
grown_by_1
=
tiles
.
clone
();
boolean
[]
grown_by_1
=
tiles
.
clone
();
growTiles
(
2
,
// 1, // 2, // grow tile selection by 1 over non-background tiles 1: 4 directions, 2 - 8 directions, 3 - 8 by 1, 4 by 1 more
growTiles
(
2
,
// 1, // 2, // grow tile selection by 1 over non-background tiles 1: 4 directions, 2 - 8 directions, 3 - 8 by 1, 4 by 1 more
...
@@ -1916,20 +2327,8 @@ public class TileProcessor {
...
@@ -1916,20 +2327,8 @@ public class TileProcessor {
weights_src
,
// or null
weights_src
,
// or null
grown_min
,
// minimal number of pixels
grown_min
,
// minimal number of pixels
min_weight
,
// minimal total weight of the cluster
min_weight
,
// minimal total weight of the cluster
min_max_weight
);
// minimal value of the maximal strengh in tghe cluster
min_max_weight
,
// minimal value of the maximal strengh in tghe cluster
/*** showDoubleFloatArrays sdfa_instance = new showDoubleFloatArrays(); // just for debugging?
debugLevel
);
String [] titles = {"orig","grown","combined"};
double [][] dbg_img = new double [titles.length][tiles.length];
for (int i = 0; i<tiles.length; i++){
dbg_img[0][i] = tiles[i]? 1:-1;
dbg_img[1][i] = grown_by_1[i]? 1:-1;
}
for (int i = 0; i< tiles.length; i++) tiles[i] &= grown_by_1[i];
for (int i = 0; i<tiles.length; i++){
dbg_img[2][i] = tiles[i]? 1:-1;
}
sdfa_instance.showArrays(dbg_img, tilesX, tilesY, true, "bgnd_nonbgnd",titles);
***/
for
(
int
i
=
0
;
i
<
tiles
.
length
;
i
++)
tiles
[
i
]
&=
grown_by_1
[
i
];
for
(
int
i
=
0
;
i
<
tiles
.
length
;
i
++)
tiles
[
i
]
&=
grown_by_1
[
i
];
}
}
...
@@ -1941,7 +2340,29 @@ public class TileProcessor {
...
@@ -1941,7 +2340,29 @@ public class TileProcessor {
double
min_weight
,
// minimal total weight of the cluster
double
min_weight
,
// minimal total weight of the cluster
double
min_max_weight
// minimal value of the maximal strengh in the cluster
double
min_max_weight
// minimal value of the maximal strengh in the cluster
){
){
removeSmallClusters
(
diag_en
,
// enable diagonal directions, false only up, dowm, right,left
tiles_src
,
// selected tiles, will modified
weights_src
,
// or null
min_area
,
// minimal number of pixels
min_weight
,
// minimal total weight of the cluster
min_max_weight
,
0
// minimal value of the maximal strengh in the cluster
);
}
public
void
removeSmallClusters
(
boolean
diag_en
,
// enable diagonal directions, false only up, dowm, right,left
boolean
[]
tiles_src
,
// selected tiles, will modified
double
[]
weights_src
,
// or null
int
min_area
,
// minimal number of pixels
double
min_weight
,
// minimal total weight of the cluster
double
min_max_weight
,
// minimal value of the maximal strengh in the cluster
int
debugLevel
){
// adding 1-tile frame around to avoid checking for the borders
// adding 1-tile frame around to avoid checking for the borders
int
dbg_tile2
=
50095
;
// 217+326*153
int
tilesX2
=
tilesX
+
2
;
int
tilesX2
=
tilesX
+
2
;
int
tilesY2
=
tilesY
+
2
;
int
tilesY2
=
tilesY
+
2
;
boolean
[]
tiles
=
new
boolean
[
tilesX2
*
tilesY2
];
boolean
[]
tiles
=
new
boolean
[
tilesX2
*
tilesY2
];
...
@@ -1974,6 +2395,9 @@ public class TileProcessor {
...
@@ -1974,6 +2395,9 @@ public class TileProcessor {
for
(
int
i
=
0
;
i
<
tiles
.
length
;
i
++)
waves
[
i
]
=
tiles
[
i
]
?
0
:
-
1
;
for
(
int
i
=
0
;
i
<
tiles
.
length
;
i
++)
waves
[
i
]
=
tiles
[
i
]
?
0
:
-
1
;
ArrayList
<
Integer
>
front
=
new
ArrayList
<
Integer
>();
ArrayList
<
Integer
>
front
=
new
ArrayList
<
Integer
>();
for
(
int
start_indx
=
0
;
start_indx
<
tiles
.
length
;
start_indx
++)
if
(
waves
[
start_indx
]
==
0
){
// found first pixel of a new cluster
for
(
int
start_indx
=
0
;
start_indx
<
tiles
.
length
;
start_indx
++)
if
(
waves
[
start_indx
]
==
0
){
// found first pixel of a new cluster
if
((
debugLevel
>
1
)
&&
(
start_indx
==
dbg_tile2
))
{
System
.
out
.
println
(
"removeSmallClusters(): start_indx="
+
start_indx
);
}
Integer
ipx
=
start_indx
;
Integer
ipx
=
start_indx
;
Integer
ipx1
;
Integer
ipx1
;
front
.
clear
();
front
.
clear
();
...
@@ -2098,9 +2522,12 @@ public class TileProcessor {
...
@@ -2098,9 +2522,12 @@ public class TileProcessor {
final
int
debugLevel
final
int
debugLevel
)
)
{
{
final
int
dbg_tile
=
49468
;
// x = 220, y = 152 (pavement line)
showDoubleFloatArrays
sdfa_instance
=
null
;
showDoubleFloatArrays
sdfa_instance
=
null
;
if
(
debugLevel
>
-
1
)
sdfa_instance
=
new
showDoubleFloatArrays
();
// just for debugging?
if
(
debugLevel
>
-
1
)
sdfa_instance
=
new
showDoubleFloatArrays
();
// just for debugging?
if
(
debugLevel
>
0
){
System
.
out
.
println
(
"FilterScan(,,"
+
disparity_far
+
", "
+
disparity_near
+
", "
+
sure_smth
);
}
final
int
tlen
=
tilesY
*
tilesX
;
final
int
tlen
=
tilesY
*
tilesX
;
double
[]
this_disparity
=
scan
.
getDisparity
();
// currently calculated, including ortho
double
[]
this_disparity
=
scan
.
getDisparity
();
// currently calculated, including ortho
double
[]
this_strength
=
scan
.
getStrength
();
// cloned, can be modified/ read back
double
[]
this_strength
=
scan
.
getStrength
();
// cloned, can be modified/ read back
...
@@ -2114,6 +2541,10 @@ public class TileProcessor {
...
@@ -2114,6 +2541,10 @@ public class TileProcessor {
boolean
[]
block_propagate
=
new
boolean
[
tlen
];
boolean
[]
block_propagate
=
new
boolean
[
tlen
];
for
(
int
i
=
0
;
i
<
tlen
;
i
++)
if
(!
Double
.
isNaN
(
this_disparity
[
i
])){
for
(
int
i
=
0
;
i
<
tlen
;
i
++)
if
(!
Double
.
isNaN
(
this_disparity
[
i
])){
if
((
debugLevel
>
0
)
&&
(
i
==
dbg_tile
)){
System
.
out
.
println
(
"FilterScan(): this_disparity["
+
i
+
"] = "
+
this_disparity
[
i
]+
"this_strength["
+
i
+
"] = "
+
this_strength
[
i
]);
}
if
(
this_disparity
[
i
]
<
disparity_far
)
{
if
(
this_disparity
[
i
]
<
disparity_far
)
{
if
(
this_strength
[
i
]
>
this_maybe
){
if
(
this_strength
[
i
]
>
this_maybe
){
if
(
bg_tiles
[
i
])
{
// far can only be among previously selected for bgnd?
if
(
bg_tiles
[
i
])
{
// far can only be among previously selected for bgnd?
...
@@ -2143,6 +2574,8 @@ public class TileProcessor {
...
@@ -2143,6 +2574,8 @@ public class TileProcessor {
block_propagate
[
i
]
=
(
these_diffs
[
imax2
][
i
]
>
sure_smth
);
block_propagate
[
i
]
=
(
these_diffs
[
imax2
][
i
]
>
sure_smth
);
}
}
boolean
[]
prohibit
=
null
;
// TBD
boolean
[]
prohibit
=
null
;
// TBD
boolean
[]
dbg_before_small
=
null
;
boolean
[]
dbg_before_lone
=
null
;
boolean
[]
dbg_before_gaps
=
null
;
boolean
[]
dbg_before_gaps
=
null
;
if
(
clt_parameters
.
min_clstr_seed
>
1
){
if
(
clt_parameters
.
min_clstr_seed
>
1
){
...
@@ -2162,23 +2595,27 @@ public class TileProcessor {
...
@@ -2162,23 +2595,27 @@ public class TileProcessor {
clt_parameters
.
min_clstr_seed
,
// int min_area, // minimal number of pixels
clt_parameters
.
min_clstr_seed
,
// int min_area, // minimal number of pixels
0.0
,
// clt_parameters.min_clstr_weight, // double min_weight // minimal total weight of the cluster
0.0
,
// clt_parameters.min_clstr_weight, // double min_weight // minimal total weight of the cluster
0.0
);
// clt_parameters.min_clstr_max); // double min_max_weight // minimal value of the maximal strengh in the cluster
0.0
);
// clt_parameters.min_clstr_max); // double min_max_weight // minimal value of the maximal strengh in the cluster
if
((
sdfa_instance
!=
null
)
&&
clt_parameters
.
show_filter_scan
)
dbg_before_small
=
these_tiles
.
clone
();
// only remove far outstanding clusters
// only remove far outstanding clusters
removeSmallClusters
(
// remove single-tile clusters - anywhere
removeSmallClusters
(
// remove single-tile clusters - anywhere
false
,
// true, // boolean diag_en, // enable diagonal directions, false only up, dowm, right,left
false
,
// true, // boolean diag_en, // enable diagonal directions, false only up, dowm, right,left
these_tiles
,
// boolean [] tiles_src, // selected tiles, will modified
these_tiles
,
// boolean [] tiles_src, // selected tiles, will modified
orig_strength
,
// null, // double [] weights_src, // or null
this_strength
,
//
orig_strength, // null, // double [] weights_src, // or null
clt_parameters
.
min_clstr_seed
,
// 2, // int min_area, // minimal number of pixels
clt_parameters
.
min_clstr_seed
,
// 2, // int min_area, // minimal number of pixels
clt_parameters
.
min_clstr_weight
,
// double min_weight // minimal total weight of the cluster
clt_parameters
.
min_clstr_weight
,
// double min_weight // minimal total weight of the cluster
clt_parameters
.
min_clstr_max
);
// double min_max_weight // minimal value of the maximal strengh in the cluster
clt_parameters
.
min_clstr_max
,
// double min_max_weight // minimal value of the maximal strengh in the cluster
debugLevel
);
if
((
sdfa_instance
!=
null
)
&&
clt_parameters
.
show_filter_scan
)
dbg_before_lone
=
these_tiles
.
clone
();
removeLoneClusters
(
removeLoneClusters
(
false
,
// true, // boolean diag_en, // enable diagonal directions, false only up, dowm, right,left
false
,
// true, // boolean diag_en, // enable diagonal directions, false only up, dowm, right,left
these_tiles
,
// boolean [] tiles_src, // selected tiles, will modified
these_tiles
,
// boolean [] tiles_src, // selected tiles, will modified
orig_strength
,
// null, // double [] weights_src, // or null
this_strength
,
//
orig_strength, // null, // double [] weights_src, // or null
clt_parameters
.
min_clstr_lone
,
// int min_area, // minimal number of pixels
clt_parameters
.
min_clstr_lone
,
// int min_area, // minimal number of pixels
clt_parameters
.
min_clstr_weight
,
// double min_weight // minimal total weight of the cluster
clt_parameters
.
min_clstr_weight
,
// double min_weight // minimal total weight of the cluster
clt_parameters
.
min_clstr_max
);
// double min_max_weight // minimal value of the maximal strengh in the cluster
clt_parameters
.
min_clstr_max
,
// double min_max_weight // minimal value of the maximal strengh in the cluster
dbg_before_gaps
=
these_tiles
.
clone
();
debugLevel
);
if
((
sdfa_instance
!=
null
)
&&
clt_parameters
.
show_filter_scan
)
dbg_before_gaps
=
these_tiles
.
clone
();
prohibit
=
far_tiles
;
// do not fill gaps over known background/far tiles
prohibit
=
far_tiles
;
// do not fill gaps over known background/far tiles
if
(
clt_parameters
.
fill_gaps
>
0
)
{
if
(
clt_parameters
.
fill_gaps
>
0
)
{
fillGaps
(
// grows, then shrinks
fillGaps
(
// grows, then shrinks
...
@@ -2194,13 +2631,14 @@ public class TileProcessor {
...
@@ -2194,13 +2631,14 @@ public class TileProcessor {
}
}
if
(
sdfa_instance
!=
null
){
if
(
(
sdfa_instance
!=
null
)
&&
clt_parameters
.
show_filter_scan
){
int
[]
enum_clusters
=
enumerateClusters
(
int
[]
enum_clusters
=
enumerateClusters
(
true
,
// boolean diag_en,
true
,
// boolean diag_en,
these_tiles
);
// boolean [] tiles_src)
these_tiles
);
// boolean [] tiles_src)
String
[]
titles
=
{
"masked"
,
"map"
,
"orig_map"
,
"hor_map"
,
"vert_map"
,
"bg_sel"
,
"far"
,
"these_gaps"
,
"these"
,
"near"
,
"block"
,
String
[]
titles
=
{
"masked"
,
"map"
,
"orig_map"
,
"hor_map"
,
"vert_map"
,
"bg_sel"
,
"far"
,
"before_small"
,
"before_lone"
,
"before_gaps"
,
"these"
,
"near"
,
"block"
,
"strength"
,
"hor-strength"
,
"vert-strength"
,
"strength"
,
"hor-strength"
,
"vert-strength"
,
"diff0"
,
"diff1"
,
"diff2"
,
"diff3"
,
"enum_clusters"
,
"disp_cm"
,
"disp_poly"
,
"disp_hor"
,
"disp_vert"
};
"diff0"
,
"diff1"
,
"diff2"
,
"diff3"
,
"enum_clusters"
,
"disp_cm"
,
"disp_poly"
,
"disp_hor"
,
"disp_vert"
};
double
[][]
dbg_img
=
new
double
[
titles
.
length
][
tilesY
*
tilesX
];
double
[][]
dbg_img
=
new
double
[
titles
.
length
][
tilesY
*
tilesX
];
...
@@ -2212,30 +2650,34 @@ public class TileProcessor {
...
@@ -2212,30 +2650,34 @@ public class TileProcessor {
// dbg_img[ 4][i] = this_vert_disparity[i];
// dbg_img[ 4][i] = this_vert_disparity[i];
dbg_img
[
5
][
i
]
=
bg_tiles
[
i
]
?
1
:
-
1
;
dbg_img
[
5
][
i
]
=
bg_tiles
[
i
]
?
1
:
-
1
;
dbg_img
[
6
][
i
]
=
far_tiles
[
i
]
?
1
:
-
1
;
dbg_img
[
6
][
i
]
=
far_tiles
[
i
]
?
1
:
-
1
;
dbg_img
[
7
][
i
]
=
dbg_before_gaps
[
i
]
?
1
:
-
1
;
dbg_img
[
7
][
i
]
=
dbg_before_small
[
i
]
?
1
:
-
1
;
dbg_img
[
8
][
i
]
=
these_tiles
[
i
]
?
1
:
-
1
;
dbg_img
[
8
][
i
]
=
dbg_before_lone
[
i
]
?
1
:
-
1
;
dbg_img
[
9
][
i
]
=
near_tiles
[
i
]
?
1
:
-
1
;
dbg_img
[
10
][
i
]
=
block_propagate
[
i
]
?
1
:
-
1
;
dbg_img
[
11
][
i
]
=
this_strength
[
i
];
// dbg_img[12][i] = hor_strength[i];
// dbg_img[13][i] = vert_strength[i];
dbg_img
[
14
][
i
]
=
these_diffs
[
0
][
i
];
dbg_img
[
9
][
i
]
=
dbg_before_gaps
[
i
]
?
1
:
-
1
;
dbg_img
[
15
][
i
]
=
these_diffs
[
1
][
i
];
dbg_img
[
10
][
i
]
=
these_tiles
[
i
]
?
1
:
-
1
;
dbg_img
[
16
][
i
]
=
these_diffs
[
2
][
i
];
dbg_img
[
11
][
i
]
=
near_tiles
[
i
]
?
1
:
-
1
;
dbg_img
[
17
][
i
]
=
these_diffs
[
3
][
i
];
dbg_img
[
12
][
i
]
=
block_propagate
[
i
]
?
1
:
-
1
;
dbg_img
[
18
][
i
]
=
enum_clusters
[
i
];
dbg_img
[
13
][
i
]
=
this_strength
[
i
];
// dbg_img[14][i] = hor_strength[i];
// dbg_img[15][i] = vert_strength[i];
dbg_img
[
16
][
i
]
=
these_diffs
[
0
][
i
];
dbg_img
[
17
][
i
]
=
these_diffs
[
1
][
i
];
dbg_img
[
18
][
i
]
=
these_diffs
[
2
][
i
];
dbg_img
[
19
][
i
]
=
these_diffs
[
3
][
i
];
dbg_img
[
20
][
i
]
=
enum_clusters
[
i
];
}
}
dbg_img
[
2
]
=
scan
.
getDisparity
(
1
);
dbg_img
[
2
]
=
scan
.
getDisparity
(
1
);
dbg_img
[
3
]
=
scan
.
getDisparity
(
2
);
dbg_img
[
3
]
=
scan
.
getDisparity
(
2
);
dbg_img
[
4
]
=
scan
.
getDisparity
(
3
);
dbg_img
[
4
]
=
scan
.
getDisparity
(
3
);
dbg_img
[
1
2
]
=
scan
.
getHorStrength
();
dbg_img
[
1
4
]
=
scan
.
getHorStrength
();
dbg_img
[
1
3
]
=
scan
.
getVertStrength
();
dbg_img
[
1
5
]
=
scan
.
getVertStrength
();
dbg_img
[
19
]
=
scan
.
disparity_map
[
ImageDtt
.
DISPARITY_INDEX_CM
];
dbg_img
[
21
]
=
scan
.
disparity_map
[
ImageDtt
.
DISPARITY_INDEX_CM
];
dbg_img
[
2
0
]
=
scan
.
disparity_map
[
ImageDtt
.
DISPARITY_INDEX_POLY
];
dbg_img
[
2
2
]
=
scan
.
disparity_map
[
ImageDtt
.
DISPARITY_INDEX_POLY
];
dbg_img
[
2
1
]
=
scan
.
disparity_map
[
ImageDtt
.
DISPARITY_INDEX_HOR
];
dbg_img
[
2
3
]
=
scan
.
disparity_map
[
ImageDtt
.
DISPARITY_INDEX_HOR
];
dbg_img
[
2
2
]
=
scan
.
disparity_map
[
ImageDtt
.
DISPARITY_INDEX_VERT
];
dbg_img
[
2
4
]
=
scan
.
disparity_map
[
ImageDtt
.
DISPARITY_INDEX_VERT
];
sdfa_instance
.
showArrays
(
dbg_img
,
tilesX
,
tilesY
,
true
,
"FilterScan"
+
clt_3d_passes
.
size
(),
titles
);
sdfa_instance
.
showArrays
(
dbg_img
,
tilesX
,
tilesY
,
true
,
"FilterScan"
+
clt_3d_passes
.
size
(),
titles
);
}
}
...
@@ -2537,7 +2979,7 @@ public class TileProcessor {
...
@@ -2537,7 +2979,7 @@ public class TileProcessor {
if
(!
these_tiles
[
i
])
this_disparity_masked
[
i
]
=
Double
.
NaN
;
if
(!
these_tiles
[
i
])
this_disparity_masked
[
i
]
=
Double
.
NaN
;
}
}
if
(
sdfa_instance
!=
null
)
{
if
(
(
sdfa_instance
!=
null
)
&&
clt_parameters
.
show_bgnd_nonbgnd
)
{
int
[]
enum_clusters
=
enumerateClusters
(
int
[]
enum_clusters
=
enumerateClusters
(
true
,
// boolean diag_en,
true
,
// boolean diag_en,
...
@@ -2592,6 +3034,7 @@ public class TileProcessor {
...
@@ -2592,6 +3034,7 @@ public class TileProcessor {
// final double [][][] image_data, // first index - number of image in a quad
// final double [][][] image_data, // first index - number of image in a quad
EyesisCorrectionParameters
.
CLTParameters
clt_parameters
,
EyesisCorrectionParameters
.
CLTParameters
clt_parameters
,
// disparity range - differences from
// disparity range - differences from
boolean
use_supertiles
,
int
bg_scan_index
,
int
bg_scan_index
,
double
disparity_far
,
//
double
disparity_far
,
//
double
disparity_near
,
//
double
disparity_near
,
//
...
@@ -2613,7 +3056,13 @@ public class TileProcessor {
...
@@ -2613,7 +3056,13 @@ public class TileProcessor {
int
[]
replaced0
=
null
;
// +1 - hor, +2 - vert
int
[]
replaced0
=
null
;
// +1 - hor, +2 - vert
// if (clt_parameters.or_hor || clt_parameters.or_vert) {
// if (clt_parameters.or_hor || clt_parameters.or_vert) {
// TODO: add filtering before/after
// TODO: add filtering before/after
String
[]
dbg_titles
=
{
double
[][]
dbg_img
=
null
;
String
[]
dbg_titles
=
null
;
boolean
show_ortho
=
clt_parameters
.
show_ortho_combine
||
(
debugLevel
>
1
);
boolean
show_super
=
clt_parameters
.
show_refine_supertiles
||
(
debugLevel
>
1
);
boolean
show_st
=
clt_parameters
.
stShow
||
(
debugLevel
>
1
);
if
(
show_ortho
){
String
[]
dbg_titles0
=
{
"combo_disparity"
,
// 0
"combo_disparity"
,
// 0
"orig_disparity"
,
// 1
"orig_disparity"
,
// 1
"hor_disparity"
,
// 2
"hor_disparity"
,
// 2
...
@@ -2630,7 +3079,8 @@ public class TileProcessor {
...
@@ -2630,7 +3079,8 @@ public class TileProcessor {
"replaced"
,
// 13
"replaced"
,
// 13
"selection"
,
// 14
"selection"
,
// 14
"tilesHor"
};
// 15
"tilesHor"
};
// 15
double
[][]
dbg_img
=
new
double
[
dbg_titles
.
length
][];
dbg_titles
=
dbg_titles0
;
dbg_img
=
new
double
[
dbg_titles
.
length
][];
dbg_img
[
1
]
=
scan_prev
.
getDisparity
(
1
).
clone
();
dbg_img
[
1
]
=
scan_prev
.
getDisparity
(
1
).
clone
();
dbg_img
[
3
]
=
scan_prev
.
getDisparity
(
2
).
clone
();
dbg_img
[
3
]
=
scan_prev
.
getDisparity
(
2
).
clone
();
dbg_img
[
5
]
=
scan_prev
.
getDisparity
(
3
).
clone
();
dbg_img
[
5
]
=
scan_prev
.
getDisparity
(
3
).
clone
();
...
@@ -2639,7 +3089,7 @@ public class TileProcessor {
...
@@ -2639,7 +3089,7 @@ public class TileProcessor {
dbg_img
[
11
]
=
scan_prev
.
getVertStrength
().
clone
();
dbg_img
[
11
]
=
scan_prev
.
getVertStrength
().
clone
();
dbg_img
[
14
]
=
new
double
[
scan_prev
.
getDisparity
().
length
];
dbg_img
[
14
]
=
new
double
[
scan_prev
.
getDisparity
().
length
];
dbg_img
[
15
]
=
new
double
[
scan_prev
.
getDisparity
().
length
];
dbg_img
[
15
]
=
new
double
[
scan_prev
.
getDisparity
().
length
];
}
replaced
=
combineOrthoDisparity
(
replaced
=
combineOrthoDisparity
(
scan_prev
,
// final CLTPass3d scan, // scan data
scan_prev
,
// final CLTPass3d scan, // scan data
clt_parameters
.
or_hor
,
// true; // Apply ortho correction to horizontal correlation (vertical features)
clt_parameters
.
or_hor
,
// true; // Apply ortho correction to horizontal correlation (vertical features)
...
@@ -2657,20 +3107,23 @@ public class TileProcessor {
...
@@ -2657,20 +3107,23 @@ public class TileProcessor {
if
(
clt_parameters
.
poles_fix
)
{
if
(
clt_parameters
.
poles_fix
)
{
boolean
[]
selection
=
new
boolean
[
replaced
.
length
];
boolean
[]
selection
=
new
boolean
[
replaced
.
length
];
boolean
[]
tilesHor
=
new
boolean
[
replaced
.
length
];
boolean
[]
tilesHor
=
new
boolean
[
replaced
.
length
];
boolean
[]
bg_sel
=
scan_bg
.
selected
;
double
[]
disparity
=
scan_prev
.
getDisparity
();
double
[]
disparity
=
scan_prev
.
getDisparity
();
for
(
int
i
=
0
;
i
<
tilesHor
.
length
;
i
++){
for
(
int
i
=
0
;
i
<
tilesHor
.
length
;
i
++){
tilesHor
[
i
]
=
(
replaced
[
i
]
&
1
)
!=
0
;
tilesHor
[
i
]
=
(
replaced
[
i
]
&
1
)
!=
0
;
// selection[i] = !bg_sel[i] && !Double.isNaN(disparity[i]) && (disparity[i] >= disparity_far) && (disparity[i] <= disparity_near);
selection
[
i
]
=
!
Double
.
isNaN
(
disparity
[
i
])
&&
(
disparity
[
i
]
>=
disparity_far
)
&&
(
disparity
[
i
]
<=
disparity_near
);
selection
[
i
]
=
!
Double
.
isNaN
(
disparity
[
i
])
&&
(
disparity
[
i
]
>=
disparity_far
)
&&
(
disparity
[
i
]
<=
disparity_near
);
}
if
(
show_ortho
){
for
(
int
i
=
0
;
i
<
tilesHor
.
length
;
i
++){
dbg_img
[
14
][
i
]
=
selection
[
i
]?
1.0
:
0.0
;
dbg_img
[
14
][
i
]
=
selection
[
i
]?
1.0
:
0.0
;
dbg_img
[
15
][
i
]
=
tilesHor
[
i
]?
1.0
:
0.0
;
dbg_img
[
15
][
i
]
=
tilesHor
[
i
]?
1.0
:
0.0
;
}
}
}
int
numFixed
=
fixVerticalPoles
(
// return number of replaced cells
int
numFixed
=
fixVerticalPoles
(
// return number of replaced cells
scan_prev
,
// CLTPass3d scan, // scan data to use
scan_prev
,
// CLTPass3d scan, // scan data to use
selection
,
// start with only from selections (if not null, continue regardless)
selection
,
// start with only from selections (if not null, continue regardless)
tilesHor
,
// horizontal correlation tiles used for composite disparity/strength;
tilesHor
,
// horizontal correlation tiles used for composite disparity/strength;
clt_parameters
.
poles_len
,
// int max_len, // maximal length to cover
clt_parameters
.
poles_len
,
// int max_len, // maximal length to cover
clt_parameters
.
poles_ratio
,
// Maximal ratio of invisible to visible pole length
clt_parameters
.
poles_min_strength
,
// double min_new_strength, // set strength to hor_strength, but not less than this
clt_parameters
.
poles_min_strength
,
// double min_new_strength, // set strength to hor_strength, but not less than this
clt_parameters
.
poles_force_disp
,
// boolean force_disparity // copy disparity down (false - use horDisparity
clt_parameters
.
poles_force_disp
,
// boolean force_disparity // copy disparity down (false - use horDisparity
true
);
true
);
...
@@ -2683,6 +3136,7 @@ public class TileProcessor {
...
@@ -2683,6 +3136,7 @@ public class TileProcessor {
}
}
}
}
if
(
show_ortho
){
dbg_img
[
0
]
=
scan_prev
.
getDisparity
(
0
);
dbg_img
[
0
]
=
scan_prev
.
getDisparity
(
0
);
dbg_img
[
2
]
=
scan_prev
.
getDisparity
(
2
);
dbg_img
[
2
]
=
scan_prev
.
getDisparity
(
2
);
...
@@ -2700,6 +3154,7 @@ public class TileProcessor {
...
@@ -2700,6 +3154,7 @@ public class TileProcessor {
dbg_img
[
12
]
=
dreplaced0
;
dbg_img
[
12
]
=
dreplaced0
;
dbg_img
[
13
]
=
dreplaced
;
dbg_img
[
13
]
=
dreplaced
;
sdfa_instance
.
showArrays
(
dbg_img
,
tilesX
,
tilesY
,
true
,
"ortho_combine"
,
dbg_titles
);
sdfa_instance
.
showArrays
(
dbg_img
,
tilesX
,
tilesY
,
true
,
"ortho_combine"
,
dbg_titles
);
}
boolean
[]
these_tiles
=
FilterScan
(
boolean
[]
these_tiles
=
FilterScan
(
scan_prev
,
// final CLTPass3d scan,
scan_prev
,
// final CLTPass3d scan,
...
@@ -2711,14 +3166,14 @@ public class TileProcessor {
...
@@ -2711,14 +3166,14 @@ public class TileProcessor {
this_maybe
,
// final double this_maybe, // maximal strength to ignore as non-background
this_maybe
,
// final double this_maybe, // maximal strength to ignore as non-background
sure_smth
,
// final double sure_smth, // if 2-nd worst image difference (noise-normalized) exceeds this - do not propagate bgnd
sure_smth
,
// final double sure_smth, // if 2-nd worst image difference (noise-normalized) exceeds this - do not propagate bgnd
clt_parameters
,
clt_parameters
,
// final int threadsMax, // maximal number of threads to launch
// final int threadsMax, // maximal number of threads to launch
// final boolean updateStatus,
// final boolean updateStatus,
debugLevel
);
debugLevel
);
// }
// }
/*
/*
boolean [] these_tiles = combineHorVertDisparity(
boolean [] these_tiles = combineHorVertDisparity(
scan_prev, // final CLTPass3d scan,
scan_prev, // final CLTPass3d scan,
...
@@ -2732,7 +3187,7 @@ public class TileProcessor {
...
@@ -2732,7 +3187,7 @@ public class TileProcessor {
debugLevel);
debugLevel);
scan_prev.combineHorVertStrength(true, false); // strength now max of original and horizontal. Use scale instead of boolean?
scan_prev.combineHorVertStrength(true, false); // strength now max of original and horizontal. Use scale instead of boolean?
*/
*/
...
@@ -2744,6 +3199,11 @@ public class TileProcessor {
...
@@ -2744,6 +3199,11 @@ public class TileProcessor {
// if (clt_parameters.stShow){
// if (clt_parameters.stShow){
// try renovated supertiles. Do twice to show both original and blured histograms
// try renovated supertiles. Do twice to show both original and blured histograms
double
[]
dbg_orig_disparity
=
null
;
double
[]
dbg_with_super_disp
=
null
;
double
[]
dbg_outlayers
=
null
;
if
(
use_supertiles
)
{
String
[]
dbg_st_titles
=
{
"raw"
,
"blurred"
+
clt_parameters
.
stSigma
,
"max-min-max"
};
String
[]
dbg_st_titles
=
{
"raw"
,
"blurred"
+
clt_parameters
.
stSigma
,
"max-min-max"
};
double
[][]
dbg_hist
=
new
double
[
dbg_st_titles
.
length
][];
double
[][]
dbg_hist
=
new
double
[
dbg_st_titles
.
length
][];
...
@@ -2769,7 +3229,7 @@ public class TileProcessor {
...
@@ -2769,7 +3229,7 @@ public class TileProcessor {
int
hist_width0
=
scan_prev
.
showDisparityHistogramWidth
();
int
hist_width0
=
scan_prev
.
showDisparityHistogramWidth
();
int
hist_height0
=
dbg_hist
[
0
].
length
/
hist_width0
;
int
hist_height0
=
dbg_hist
[
0
].
length
/
hist_width0
;
if
(
clt_parameters
.
stShow
){
if
(
show_st
){
sdfa_instance
.
showArrays
(
dbg_hist
,
hist_width0
,
hist_height0
,
true
,
"disparity_supertiles_histograms"
,
dbg_st_titles
);
sdfa_instance
.
showArrays
(
dbg_hist
,
hist_width0
,
hist_height0
,
true
,
"disparity_supertiles_histograms"
,
dbg_st_titles
);
}
}
...
@@ -2777,25 +3237,29 @@ public class TileProcessor {
...
@@ -2777,25 +3237,29 @@ public class TileProcessor {
clt_parameters
.
stMinBgDisparity
,
// final double minBgDisparity,
clt_parameters
.
stMinBgDisparity
,
// final double minBgDisparity,
clt_parameters
.
stMinBgFract
);
// final double minBgFract);
clt_parameters
.
stMinBgFract
);
// final double minBgFract);
double
[]
dbg_orig_disparity
=
scan_prev
.
getDisparity
().
clone
();
dbg_orig_disparity
=
scan_prev
.
getDisparity
().
clone
();
// combine weak with supertiles
// combine weak with supertiles
double
[]
dbg_with_super_disp
=
scan_prev
.
combineSuper
(
clt_parameters
.
stUseDisp
);
dbg_with_super_disp
=
scan_prev
.
combineSuper
(
clt_parameters
.
stUseDisp
);
if
(
dbg_with_super_disp
!=
null
)
dbg_with_super_disp
=
dbg_with_super_disp
.
clone
();
// else no super disparity available
if
(
dbg_with_super_disp
!=
null
)
dbg_with_super_disp
=
dbg_with_super_disp
.
clone
();
// else no super disparity available
}
// replace weak outlaye tiles with weighted averages (modifies disparity)
// replace weak outlaye tiles with weighted averages (modifies disparity)
boolean
[]
outlayers
=
scan_prev
.
replaceWeakOutlayers
(
boolean
[]
outlayers
=
scan_prev
.
replaceWeakOutlayers
(
null
,
// final boolean [] selection,
null
,
// final boolean [] selection,
clt_parameters
.
outlayerStrength
,
//final double weakStrength, // strength to be considered weak, subject to this replacement
clt_parameters
.
outlayerStrength
,
//final double weakStrength, // strength to be considered weak, subject to this replacement
clt_parameters
.
outlayerDiff
,
// final double maxDiff)
clt_parameters
.
outlayerDiff
,
// final double maxDiff)
clt_parameters
.
outlayerDiffPos
,
// final double maxDiff)
clt_parameters
.
outlayerDiffNeg
,
// final double maxDiff)
0.5
*
disparity_far
,
0.5
*
disparity_far
,
2.0
*
disparity_near
);
2.0
*
disparity_near
);
d
ouble
[]
d
bg_outlayers
=
new
double
[
outlayers
.
length
];
dbg_outlayers
=
new
double
[
outlayers
.
length
];
for
(
int
i
=
0
;
i
<
outlayers
.
length
;
i
++){
for
(
int
i
=
0
;
i
<
outlayers
.
length
;
i
++){
dbg_outlayers
[
i
]
=
outlayers
[
i
]?
1.0
:
0.0
;
dbg_outlayers
[
i
]
=
outlayers
[
i
]?
1.0
:
0.0
;
}
}
// set disparity for border pixels (may be overkill)
// set disparity for border pixels (may be overkill)
DisparityProcessor
dp
=
new
DisparityProcessor
(
this
,
clt_parameters
.
transform_size
*
geometryCorrection
.
getScaleDzDx
());
DisparityProcessor
dp
=
new
DisparityProcessor
(
this
,
clt_parameters
.
transform_size
*
geometryCorrection
.
getScaleDzDx
());
boolean
[]
grown
=
these_tiles
.
clone
();
boolean
[]
grown
=
these_tiles
.
clone
();
...
@@ -2809,23 +3273,19 @@ public class TileProcessor {
...
@@ -2809,23 +3273,19 @@ public class TileProcessor {
int
[]
neighbors
=
dp
.
getNeighbors
(
// creates neighbors mask from bitmask
int
[]
neighbors
=
dp
.
getNeighbors
(
// creates neighbors mask from bitmask
grown
,
// these_tiles, // grown, // these_tiles, // boolean [] selected,
grown
,
// these_tiles, // grown, // these_tiles, // boolean [] selected,
tilesX
);
tilesX
);
// int [] neighbors_orig = neighbors.clone();
if
(
clt_parameters
.
show_neighbors
)
{
double
[]
dbg_neib
=
dp
.
dbgShowNeighbors
(
double
[]
dbg_neib
=
dp
.
dbgShowNeighbors
(
grown
,
// these_tiles, // grown, // these_tiles,
grown
,
// these_tiles, // grown, // these_tiles,
neighbors
,
// _orig, // int [] neighbors,
neighbors
,
// _orig, // int [] neighbors,
clt_parameters
.
transform_size
,
// int tile_size,
clt_parameters
.
transform_size
,
// int tile_size,
-
1.0
,
// double bgnd,
-
1.0
,
// double bgnd,
1.0
);
// double fgnd)
1.0
);
// double fgnd)
// double [] new_disparity = this_disparity.clone();
// double [][]dbgDeriv = new double [2][]; // [these_tiles.length];
sdfa_instance
.
showArrays
(
dbg_neib
,
tilesX
*
clt_parameters
.
transform_size
,
tilesY
*
clt_parameters
.
transform_size
,
"XXneighbors"
);
sdfa_instance
.
showArrays
(
dbg_neib
,
tilesX
*
clt_parameters
.
transform_size
,
tilesY
*
clt_parameters
.
transform_size
,
"XXneighbors"
);
}
dp
.
smoothDisparity
(
dp
.
smoothDisparity
(
clt_parameters
.
tiDispPull
,
// final double dispPull, // clt_parameters.tiDispPull or 0.0
clt_parameters
.
tiDispPull
,
// final double dispPull, // clt_parameters.tiDispPull or 0.0
3
,
// 2, // 3, // final int mask, // 1 - work on internal elements, 2 - on border elements, 3 - both (internal first);
2
,
// 2, // 3, // final int mask, // 1 - work on internal elements, 2 - on border elements, 3 - both (internal first);
clt_parameters
.
tiIterations
,
// final int num_passes,
clt_parameters
.
tiIterations
,
// final int num_passes,
Math
.
pow
(
10.0
,
-
clt_parameters
.
tiPrecision
),
// final double maxDiff, // maximal change in any of the disparity values
Math
.
pow
(
10.0
,
-
clt_parameters
.
tiPrecision
),
// final double maxDiff, // maximal change in any of the disparity values
neighbors
,
// final int [] neighbors, // +1 - up (N), +2 - up-right - NE, ... +0x80 - NW
neighbors
,
// final int [] neighbors, // +1 - up (N), +2 - up-right - NE, ... +0x80 - NW
...
@@ -2839,31 +3299,11 @@ public class TileProcessor {
...
@@ -2839,31 +3299,11 @@ public class TileProcessor {
clt_parameters
,
clt_parameters
,
threadsMax
,
// maximal number of threads to launch
threadsMax
,
// maximal number of threads to launch
debugLevel
);
debugLevel
);
/*
/*
double [] measured_disparity = dp.dbgRescaleToPixels(
double [] measured_disparity = dp.dbgRescaleToPixels(
this_disparity,
this_disparity,
clt_parameters.transform_size); // int tile_size)
clt_parameters.transform_size); // int tile_size)
*/
*/
double
[]
masked_filtered
=
scan_prev
.
getDisparity
().
clone
();
for
(
int
i
=
0
;
i
<
masked_filtered
.
length
;
i
++){
if
(!
grown
[
i
])
masked_filtered
[
i
]
=
Double
.
NaN
;
}
// if (clt_parameters.stShow){
String
[]
dbg_disp_tiltes
={
"masked"
,
"filtered"
,
"disp_combo"
,
"disparity"
,
"st_disparity"
,
"strength"
,
"st_strength"
,
"outlayers"
};
double
[][]
dbg_disp
=
new
double
[
dbg_disp_tiltes
.
length
][];
dbg_disp
[
0
]
=
masked_filtered
;
dbg_disp
[
1
]
=
scan_prev
.
getDisparity
();
dbg_disp
[
2
]
=
dbg_with_super_disp
;
dbg_disp
[
3
]
=
dbg_orig_disparity
;
dbg_disp
[
4
]
=
scan_prev
.
getBgDisparity
();
dbg_disp
[
5
]
=
scan_prev
.
getStrength
();
dbg_disp
[
6
]
=
scan_prev
.
getBgStrength
();
dbg_disp
[
7
]
=
dbg_outlayers
;
sdfa_instance
.
showArrays
(
dbg_disp
,
tilesX
,
tilesY
,
true
,
"refine_disparity_supertiles"
+
clt_3d_passes
.
size
(),
dbg_disp_tiltes
);
// }
// prepare new task and run
// prepare new task and run
double
[][]
disparityTask
=
new
double
[
tilesY
][
tilesX
];
double
[][]
disparityTask
=
new
double
[
tilesY
][
tilesX
];
int
[][]
tile_op
=
new
int
[
tilesY
][
tilesX
];
int
[][]
tile_op
=
new
int
[
tilesY
][
tilesX
];
...
@@ -2884,10 +3324,40 @@ public class TileProcessor {
...
@@ -2884,10 +3324,40 @@ public class TileProcessor {
borderTiles
[
indx
]
=
false
;
borderTiles
[
indx
]
=
false
;
}
}
}
}
double
[]
masked_filtered
=
scan_prev
.
getDisparity
().
clone
();
for
(
int
i
=
0
;
i
<
masked_filtered
.
length
;
i
++){
if
(!
grown
[
i
])
masked_filtered
[
i
]
=
Double
.
NaN
;
}
if
(
show_super
){
String
[]
dbg_disp_tiltes
={
"masked"
,
"filtered"
,
"disp_combo"
,
"disparity"
,
"st_disparity"
,
"strength"
,
"st_strength"
,
"outlayers"
,
"these"
,
"border"
,
"border_tiles"
};
double
[][]
dbg_disp
=
new
double
[
dbg_disp_tiltes
.
length
][];
dbg_disp
[
0
]
=
masked_filtered
;
dbg_disp
[
1
]
=
scan_prev
.
getDisparity
();
dbg_disp
[
2
]
=
dbg_with_super_disp
;
dbg_disp
[
3
]
=
dbg_orig_disparity
;
dbg_disp
[
4
]
=
scan_prev
.
getBgDisparity
();
dbg_disp
[
5
]
=
scan_prev
.
getStrength
();
dbg_disp
[
6
]
=
scan_prev
.
getBgStrength
();
dbg_disp
[
7
]
=
dbg_outlayers
;
dbg_disp
[
8
]
=
new
double
[
masked_filtered
.
length
];
dbg_disp
[
9
]
=
new
double
[
masked_filtered
.
length
];
dbg_disp
[
10
]
=
new
double
[
masked_filtered
.
length
];
for
(
int
i
=
0
;
i
<
dbg_disp
[
8
].
length
;
i
++){
dbg_disp
[
8
][
i
]
=
these_tiles
[
i
]?
1.0
:
0.0
;
dbg_disp
[
9
][
i
]
=
border
[
i
]?
1.0
:
0.0
;
dbg_disp
[
10
][
i
]
=
borderTiles
[
i
]?
1.0
:
0.0
;
}
sdfa_instance
.
showArrays
(
dbg_disp
,
tilesX
,
tilesY
,
true
,
"refine_disparity_supertiles"
+
clt_3d_passes
.
size
(),
dbg_disp_tiltes
);
}
CLTPass3d
scan_next
=
new
CLTPass3d
();
CLTPass3d
scan_next
=
new
CLTPass3d
();
scan_next
.
disparity
=
disparityTask
;
scan_next
.
disparity
=
disparityTask
;
scan_next
.
tile_op
=
tile_op
;
scan_next
.
tile_op
=
tile_op
;
scan_next
.
border_tiles
=
borderTiles
;
scan_next
.
border_tiles
=
borderTiles
;
scan_next
.
selected
=
grown
;
// includes border_tiles
clt_3d_passes
.
add
(
scan_next
);
clt_3d_passes
.
add
(
scan_next
);
// }
// }
return
scan_next
;
return
scan_next
;
...
@@ -2900,6 +3370,7 @@ public class TileProcessor {
...
@@ -2900,6 +3370,7 @@ public class TileProcessor {
public
void
secondPassSetup
(
// prepare tile tasks for the second pass based on the previous one(s)
public
void
secondPassSetup
(
// prepare tile tasks for the second pass based on the previous one(s)
// final double [][][] image_data, // first index - number of image in a quad
// final double [][][] image_data, // first index - number of image in a quad
EyesisCorrectionParameters
.
CLTParameters
clt_parameters
,
EyesisCorrectionParameters
.
CLTParameters
clt_parameters
,
boolean
use_supertiles
,
int
bg_scan_index
,
int
bg_scan_index
,
// disparity range - differences from
// disparity range - differences from
double
disparity_far
,
//
double
disparity_far
,
//
...
@@ -2942,6 +3413,12 @@ public class TileProcessor {
...
@@ -2942,6 +3413,12 @@ public class TileProcessor {
// if (clt_parameters.stShow){
// if (clt_parameters.stShow){
// try renovated supertiles. Do twice to show both original and blured histograms
// try renovated supertiles. Do twice to show both original and blured histograms
double
[]
dbg_orig_disparity
=
null
;
double
[]
dbg_with_super_disp
=
null
;
double
[]
dbg_outlayers
=
null
;
boolean
[]
grown
=
these_tiles
.
clone
();
if
(
use_supertiles
)
{
String
[]
dbg_st_titles
=
{
"raw"
,
"blurred"
+
clt_parameters
.
stSigma
,
"max-min-max"
};
String
[]
dbg_st_titles
=
{
"raw"
,
"blurred"
+
clt_parameters
.
stSigma
,
"max-min-max"
};
double
[][]
dbg_hist
=
new
double
[
dbg_st_titles
.
length
][];
double
[][]
dbg_hist
=
new
double
[
dbg_st_titles
.
length
][];
...
@@ -2974,24 +3451,33 @@ public class TileProcessor {
...
@@ -2974,24 +3451,33 @@ public class TileProcessor {
clt_parameters
.
stMinBgDisparity
,
// final double minBgDisparity,
clt_parameters
.
stMinBgDisparity
,
// final double minBgDisparity,
clt_parameters
.
stMinBgFract
);
// final double minBgFract);
clt_parameters
.
stMinBgFract
);
// final double minBgFract);
boolean
[]
st_grown
=
these_tiles
.
clone
();
//
st_grown = these_tiles.clone();
growTiles
(
growTiles
(
2
,
// grow tile selection by 1 over non-background tiles 1: 4 directions, 2 - 8 directions, 3 - 8 by 1, 4 by 1 more
2
,
// grow tile selection by 1 over non-background tiles 1: 4 directions, 2 - 8 directions, 3 - 8 by 1, 4 by 1 more
st_
grown
,
// boolean [] tiles,
grown
,
// boolean [] tiles,
null
);
// boolean [] prohibit)
null
);
// boolean [] prohibit)
double
[]
dbg_orig_disparity
=
scan_prev
.
getDisparity
().
clone
();
dbg_orig_disparity
=
scan_prev
.
getDisparity
().
clone
();
// combine weak with supertiles
// combine weak with supertiles
double
[]
dbg_with_super_disp
=
scan_prev
.
combineSuper
(
clt_parameters
.
stUseDisp
);
dbg_with_super_disp
=
scan_prev
.
combineSuper
(
clt_parameters
.
stUseDisp
);
if
(
dbg_with_super_disp
!=
null
)
dbg_with_super_disp
=
dbg_with_super_disp
.
clone
();
// else no super disparity available
if
(
dbg_with_super_disp
!=
null
)
dbg_with_super_disp
=
dbg_with_super_disp
.
clone
();
// else no super disparity available
}
else
{
growTiles
(
2
,
// grow tile selection by 1 over non-background tiles 1: 4 directions, 2 - 8 directions, 3 - 8 by 1, 4 by 1 more
grown
,
// boolean [] tiles,
null
);
// boolean [] prohibit)
}
// replace weak outlaye tiles with weighted averages (modifies disparity)
// replace weak outlaye tiles with weighted averages (modifies disparity)
boolean
[]
outlayers
=
scan_prev
.
replaceWeakOutlayers
(
boolean
[]
outlayers
=
scan_prev
.
replaceWeakOutlayers
(
null
,
// final boolean [] selection,
null
,
// final boolean [] selection,
clt_parameters
.
outlayerStrength
,
//final double weakStrength, // strength to be considered weak, subject to this replacement
clt_parameters
.
outlayerStrength
,
//final double weakStrength, // strength to be considered weak, subject to this replacement
clt_parameters
.
outlayerDiff
,
// final double maxDiff)
clt_parameters
.
outlayerDiff
,
// final double maxDiff)
clt_parameters
.
outlayerDiffPos
,
// final double maxDiff)
clt_parameters
.
outlayerDiffNeg
,
// final double maxDiff)
0.5
*
disparity_far
,
0.5
*
disparity_far
,
2.0
*
disparity_near
);
2.0
*
disparity_near
);
d
ouble
[]
d
bg_outlayers
=
new
double
[
outlayers
.
length
];
dbg_outlayers
=
new
double
[
outlayers
.
length
];
for
(
int
i
=
0
;
i
<
outlayers
.
length
;
i
++){
for
(
int
i
=
0
;
i
<
outlayers
.
length
;
i
++){
dbg_outlayers
[
i
]
=
outlayers
[
i
]?
1.0
:
0.0
;
dbg_outlayers
[
i
]
=
outlayers
[
i
]?
1.0
:
0.0
;
...
@@ -2999,7 +3485,7 @@ public class TileProcessor {
...
@@ -2999,7 +3485,7 @@ public class TileProcessor {
double
[]
masked_filtered
=
scan_prev
.
getDisparity
().
clone
();
double
[]
masked_filtered
=
scan_prev
.
getDisparity
().
clone
();
for
(
int
i
=
0
;
i
<
masked_filtered
.
length
;
i
++){
for
(
int
i
=
0
;
i
<
masked_filtered
.
length
;
i
++){
if
(!
st_grown
[
i
])
masked_filtered
[
i
]
=
Double
.
NaN
;
if
(!
grown
[
i
])
masked_filtered
[
i
]
=
Double
.
NaN
;
}
}
if
(
clt_parameters
.
stShow
){
if
(
clt_parameters
.
stShow
){
String
[]
dbg_disp_tiltes
={
"masked"
,
"filtered"
,
"disp_combo"
,
"disparity"
,
"st_disparity"
,
"strength"
,
"st_strength"
,
"outlayers"
};
String
[]
dbg_disp_tiltes
={
"masked"
,
"filtered"
,
"disp_combo"
,
"disparity"
,
"st_disparity"
,
"strength"
,
"st_strength"
,
"outlayers"
};
...
@@ -3024,7 +3510,7 @@ public class TileProcessor {
...
@@ -3024,7 +3510,7 @@ public class TileProcessor {
double
[]
prev_disparity
=
scan_prev
.
getDisparity
();
double
[]
prev_disparity
=
scan_prev
.
getDisparity
();
for
(
int
ty
=
0
;
ty
<
tilesY
;
ty
++)
for
(
int
tx
=
0
;
tx
<
tilesX
;
tx
++){
for
(
int
ty
=
0
;
ty
<
tilesY
;
ty
++)
for
(
int
tx
=
0
;
tx
<
tilesX
;
tx
++){
int
indx
=
tilesX
*
ty
+
tx
;
int
indx
=
tilesX
*
ty
+
tx
;
if
(
st_
grown
[
indx
])
{
if
(
grown
[
indx
])
{
borderTiles
[
indx
]
=
!
these_tiles
[
indx
];
borderTiles
[
indx
]
=
!
these_tiles
[
indx
];
disparityTask
[
ty
][
tx
]
=
prev_disparity
[
indx
];
disparityTask
[
ty
][
tx
]
=
prev_disparity
[
indx
];
tile_op
[
ty
][
tx
]
=
op
;
tile_op
[
ty
][
tx
]
=
op
;
...
@@ -3047,7 +3533,7 @@ public class TileProcessor {
		//clt_parameters.transform_size;
		DisparityProcessor dp = new DisparityProcessor(this, clt_parameters.transform_size * geometryCorrection.getScaleDzDx());
/*
		boolean [] grown = these_tiles.clone();
		growTiles(
...
@@ -3055,7 +3541,7 @@ public class TileProcessor {
				2, // grow tile selection by 1 over non-background tiles 1: 4 directions, 2 - 8 directions, 3 - 8 by 1, 4 by 1 more
				grown, // boolean [] tiles,
				null); // boolean [] prohibit)
*/
		boolean [] border = grown.clone();
		for (int i = 0; i < border.length; i++) border[i] &= !these_tiles[i];
...
@@ -3311,7 +3797,7 @@ public class TileProcessor {
				else disp_diff[2][i] = Double.NaN;
			}
+			if (clt_parameters.show_neighbors) {
				int numImages = 2 + 3 * clt_parameters.tiNumCycles + 2 + 3 + 3;
				String [] titles_all = new String [numImages];
				double [][] dbg_img = new double [numImages][];
...
@@ -3337,9 +3823,9 @@ public class TileProcessor {
				dbg_img[indx++] = dp.dbgRescaleToPixels(
						dbg_far_near,
						clt_parameters.transform_size);
				// double [][] dbg_img = {dbg_neib, dbg_neib_broken, stress, stress1, measured_disparity, smooth_disparity,smooth_disparity1};
				// sdfa_instance.showArrays(dbg_neib,tilesX*clt_parameters.transform_size, tilesY*clt_parameters.transform_size,"neighbors");
				titles_all[indx] = "strength_orig";
				dbg_img[indx++] = dp.dbgRescaleToPixels(
						true_strength,
...
@@ -3362,6 +3848,7 @@ public class TileProcessor {
						clt_parameters.transform_size);
				sdfa_instance.showArrays(dbg_img, tilesX*clt_parameters.transform_size, tilesY*clt_parameters.transform_size,
						true, "neighbors", titles_all);
+			}
			//disp_diff
			//************************************************
			int [][] flaps = dp.createOverlapGeometry(
...
@@ -3370,8 +3857,10 @@ public class TileProcessor {
					border,     // final boolean [] border,
					threadsMax, // maximal number of threads to launch
					debugLevel);
+			String [] titleFlaps = {"neib","N","NE","E","SE","S","SW","W","NW"};
+			if (clt_parameters.show_flaps_dirs){
				double [][] dbg_flaps = dp.dbgShowOverlaps(
						// boolean [] selected,
						flaps, // int [][] flaps,
						clt_parameters.transform_size, // int tile_size,
						-1.0, // double bgnd,
...
@@ -3382,11 +3871,10 @@ public class TileProcessor {
						clt_parameters.transform_size, // int tile_size,
						-1.0, // double bgnd,
						1.0); // double fgnd)
-				String [] titleFlaps = {"neib","N","NE","E","SE","S","SW","W","NW"};
				double [][] dbg_flaps_all = {dbg_neibs, dbg_flaps[0], dbg_flaps[1], dbg_flaps[2], dbg_flaps[3], dbg_flaps[4], dbg_flaps[5], dbg_flaps[6], dbg_flaps[7]};
				sdfa_instance.showArrays(dbg_flaps_all, tilesX*clt_parameters.transform_size, tilesY*clt_parameters.transform_size,
						true, "flaps-dirs", titleFlaps);
+			}
			int [][][] clustersNO = dp.extractNonOlerlap(
					true, // diag_en,
					neighbors, // +1 - up (N), +2 - up-right - NE, ... +0x80 - NW
...
@@ -3394,6 +3882,7 @@ public class TileProcessor {
					border, // border should be diagonal!
					threadsMax, // maximal number of threads to launch
					debugLevel);
+			if (clt_parameters.show_first_clusters){
				int dbg_max_cluster_show = 50;
				if (dbg_max_cluster_show > clustersNO.length) dbg_max_cluster_show = clustersNO.length;
...
@@ -3406,9 +3895,9 @@ public class TileProcessor {
					}
				}
				// String [] titleFlaps = {"neib","N","NE","E","SE","S","SW","W","NW"};
				String [] titleClusters = new String [titleFlaps.length + dbg_max_cluster_show];
				// int indxClust = 0;
				for (int i = 0; i < titleFlaps.length; i++){
					titleClusters[i] = titleFlaps[i];
				}
...
@@ -3428,7 +3917,7 @@ public class TileProcessor {
					}
				}
				sdfa_instance.showArrays(dbg_clusters_show, tilesX, tilesY, true, "first " + dbg_max_cluster_show + " clusters", titleClusters);
+			}
			int numScans = 0;
			if (clt_parameters.shUseFlaps) {
...
@@ -3442,6 +3931,7 @@ public class TileProcessor {
						clustersNO,    // int [] clusters_in,
						disparity_far,
						disparity_near,
+						clt_parameters.show_shells,
						debugLevel);
			} else {
...
@@ -3483,6 +3973,7 @@ public class TileProcessor {
			boolean [] selection,        // start with only from selections (if not null, continue regardless)
			boolean [] tilesHor,         // horizontal correlation tiles used for composite disparity/strength;
			int        max_len,          // maximal length to cover
+			double     poles_ratio,      // Maximal ratio of invisible to visible pole length
			double     min_new_strength, // set strength to hor_strength, but not less than this
			boolean    force_disparity,  // copy disparity down (false - use horDisparity
			boolean    keepStrength      // do not reduce composite strength from what it was before replacement
...
@@ -3504,6 +3995,18 @@ public class TileProcessor {
				if (tilesHor[nTileEnd] ||
						(disparity[nTileEnd] > disparity[nTile])){
					if (((nTileEnd - nTile) <= (max_len * tilesX)) || (max_len == 0)){
+						// Calculate length of visible pole (above break)
+						if (poles_ratio > 0.0){
+							int pole_length = 1;
+							for (int nt = nTile - tilesX; nt >= 0; nt -= tilesX){
+								if (!tilesHor[nt]) break;
+								pole_length++;
+							}
+							if ((nTileEnd - nTile) > (poles_ratio * pole_length * tilesX)){
+								break; // too long invisible part
+							}
+						}
						for (int nt = nTile + tilesX; nt < nTileEnd; nt += tilesX){
							disparity[nt] = force_disparity ? disparity[nTile] : hor_disparity[nt];
							if (!keepStrength || (strength[nt] < hor_strength[nt])) {
...
@@ -3513,8 +4016,9 @@ public class TileProcessor {
							tilesHor[nt] = true;
							num_replaced++;
						}
-						break;
+						// break;
					}
+					break;
				}
			}
		}
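The added poles_ratio block above limits how far a detected pole may be continued downward relative to its visible height (the new poles_ratio parameter, default 1.0). A standalone restatement of just that test, using the same row-major indexing (one image row is tilesX tiles) but otherwise hypothetical names:

// Illustrative only: decide whether a pole whose last detected tile is nTile may be
// extended down to nTileEnd. The invisible (extended) part must not be longer than
// poles_ratio times the visible part counted upward from nTile; poles_ratio <= 0 disables the check.
class PoleRatioSketch {
	static boolean mayExtendPole(
			boolean [] tilesHor,    // tiles already marked as pole (horizontal-correlation) tiles
			int        nTile,       // bottom tile of the detected (visible) pole segment
			int        nTileEnd,    // candidate tile to extend down to (same column, larger index)
			double     poles_ratio, // maximal invisible/visible length ratio
			int        tilesX) {    // tiles per image row
		if (poles_ratio <= 0.0) return true;
		int pole_length = 1;                         // visible length in tiles, counted upward
		for (int nt = nTile - tilesX; nt >= 0; nt -= tilesX) {
			if (!tilesHor[nt]) break;
			pole_length++;
		}
		// (nTileEnd - nTile) is the invisible length expressed in linear tile indices (tilesX per row)
		return (nTileEnd - nTile) <= (poles_ratio * pole_length * tilesX);
	}
}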
...
@@ -3763,6 +4267,13 @@ public class TileProcessor {
		for (int i = 0; i < data.length; i++) data[i] *= strength[i];
		SharpBlurTiles(strength, sigma, k, vert);
+		// Maybe not needed? Can NaN be blured?
+		for (int i = 0; i < data.length; i++) {
+			if (Double.isNaN(data[i])) data[i] = 0.0;
+		}
		SharpBlurTiles(data, sigma, k, vert);
		for (int i = 0; i < data.length; i++) {
			if (strength[i] != 0.0) data[i] /= strength[i];
...
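The last hunk zeroes NaN tiles before blurring the strength-weighted data. The surrounding pattern is data *= strength, blur strength, blur data, data /= blurred strength; without the clean-up a single NaN (for example a masked tile multiplied by zero strength, since NaN * 0.0 is still NaN in Java) would spread through the blur kernel. A minimal sketch of that pattern with a 1-D 3-tap box blur standing in for the actual SharpBlurTiles():

// Minimal sketch of strength-weighted smoothing with NaN suppression.
// boxBlur3() is a stand-in for the real SharpBlurTiles(); only the
// multiply / zero-NaN / blur / divide pattern mirrors the hunk above.
public class WeightedBlurSketch {
	static void boxBlur3(double [] d) { // simple in-place 3-tap box blur
		double [] src = d.clone();
		for (int i = 0; i < d.length; i++) {
			double s = src[i];
			int n = 1;
			if (i > 0)            { s += src[i - 1]; n++; }
			if (i < d.length - 1) { s += src[i + 1]; n++; }
			d[i] = s / n;
		}
	}

	static void weightedBlur(double [] data, double [] strength) {
		for (int i = 0; i < data.length; i++) data[i] *= strength[i];
		boxBlur3(strength);
		for (int i = 0; i < data.length; i++) {
			if (Double.isNaN(data[i])) data[i] = 0.0; // NaN * 0.0 strength is still NaN, zero it before blurring
		}
		boxBlur3(data);
		for (int i = 0; i < data.length; i++) {
			if (strength[i] != 0.0) data[i] /= strength[i];
		}
	}

	public static void main(String [] args) {
		double [] data     = {1.0, Double.NaN, 3.0, 3.0};
		double [] strength = {1.0, 0.0,        1.0, 1.0};
		weightedBlur(data, strength);
		System.out.println(java.util.Arrays.toString(data)); // finite values everywhere
	}
}

With the NaN tile zeroed, the masked position simply contributes nothing to its neighbors and the strength-normalized result stays finite.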