Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
I
imagej-elphel
Project
Project
Details
Activity
Releases
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
3
Issues
3
List
Board
Labels
Milestones
Wiki
Wiki
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Commits
Issue Boards
Open sidebar
Elphel
imagej-elphel
Commits
65a99dbc
Commit
65a99dbc
authored
Oct 24, 2024
by
Andrey Filippov
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
next snapshot before adding more to weights/fX
parent
37760e8c
Changes
1
Show whitespace changes
Inline
Side-by-side
Showing
1 changed file
with
417 additions
and
536 deletions
+417
-536
VegetationLMA.java
...main/java/com/elphel/imagej/vegetation/VegetationLMA.java
+417
-536
No files found.
src/main/java/com/elphel/imagej/vegetation/VegetationLMA.java
View file @
65a99dbc
...
@@ -111,7 +111,7 @@ public class VegetationLMA {
...
@@ -111,7 +111,7 @@ public class VegetationLMA {
private
ArrayList
<
HashSet
<
Integer
>>
elev_contribs
;
// new ArrayList<HashSet<Integer>>(woi_length);
private
ArrayList
<
HashSet
<
Integer
>>
elev_contribs
;
// new ArrayList<HashSet<Integer>>(woi_length);
private
ArrayList
<
HashSet
<
Integer
>>
param_contribs
;
// new ArrayList<HashSet<Integer>>(woi_length);
private
ArrayList
<
HashSet
<
Integer
>>
param_contribs
;
// new ArrayList<HashSet<Integer>>(woi_length);
private
boolean
[][]
param_pivot
;
// [all_pars][all_pars] which parameters parameters have common dependents
private
boolean
[][]
param_pivot
;
// [all_pars][all_pars] which parameters parameters have common dependents
private
int
[][]
param_lists
;
// [all_pars][related] - for each parameter - null or integer array of related (by JtJ) parameters
// TODO: Compare with/without pivot to verify/troubleshoot.
// TODO: Compare with/without pivot to verify/troubleshoot.
...
@@ -195,7 +195,7 @@ public class VegetationLMA {
...
@@ -195,7 +195,7 @@ public class VegetationLMA {
public
int
[][]
terr_neibs
;
// corresponds to parameters for terrain, for smoothing undefined terrain, similar to alpha_neibs
public
int
[][]
terr_neibs
;
// corresponds to parameters for terrain, for smoothing undefined terrain, similar to alpha_neibs
public
int
[][]
veget_neibs
;
// corresponds to parameters for vegetation, for smoothing undefined vegetation, similar to alpha_neibs
public
int
[][]
veget_neibs
;
// corresponds to parameters for vegetation, for smoothing undefined vegetation, similar to alpha_neibs
public
int
[][]
elevation_neibs
;
// corresponds to parameters for elevation, for smoothing undefined elevations, similar to alpha_neibs
public
int
[][]
elevation_neibs
;
// corresponds to parameters for elevation, for smoothing undefined elevations, similar to alpha_neibs
public
int
[][]
neibs_neibs
;
public
double
delta
=
1
e
-
5
;
// 7;
public
double
delta
=
1
e
-
5
;
// 7;
...
@@ -628,6 +628,30 @@ public class VegetationLMA {
...
@@ -628,6 +628,30 @@ public class VegetationLMA {
ind_pars_elevation
,
// final int ind_samples, // ind_pars_vegetation_alpha
ind_pars_elevation
,
// final int ind_samples, // ind_pars_vegetation_alpha
num_pars_elevation
);
// final int num_samples); // num_pars_vegetation_alpha
num_pars_elevation
);
// final int num_samples); // num_pars_vegetation_alpha
neibs_neibs
=
new
int
[
par_rindex
.
length
][];
addSecondNeighbors
(
neibs_neibs
,
// final int [][] second_neibs,
terr_neibs
,
// final int [][] par_neibs,
ind_pars_terrain
);
// final int start_index)
addSecondNeighbors
(
neibs_neibs
,
// final int [][] second_neibs,
alpha_neibs
,
// final int [][] par_neibs,
ind_pars_alpha
);
// final int start_index)
addSecondNeighbors
(
neibs_neibs
,
// final int [][] second_neibs,
veget_neibs
,
// final int [][] par_neibs,
ind_pars_vegetation
);
// final int start_index)
addSecondNeighbors
(
neibs_neibs
,
// final int [][] second_neibs,
elevation_neibs
,
// final int [][] par_neibs,
ind_pars_elevation
);
// final int start_index)
if
(
debugLevel
>
4
)
{
showPivot
(
neibs_neibs
,
// int [][] neibs_neibs,
"neibs_neibs.tiff"
);
// String title)
}
setupYSrc
(
// updated, generates both y and y_hf(different lengths)
setupYSrc
(
// updated, generates both y and y_hf(different lengths)
(
hifreq_weight
>
0
));
// boolean use_hf);
(
hifreq_weight
>
0
));
// boolean use_hf);
...
@@ -1144,6 +1168,12 @@ public class VegetationLMA {
...
@@ -1144,6 +1168,12 @@ public class VegetationLMA {
}
}
}
}
System
.
out
.
println
(
"getWJtJlambdaDebug(): max_err="
+
max_err
);
System
.
out
.
println
(
"getWJtJlambdaDebug(): max_err="
+
max_err
);
showPivot
(
jtj
,
// double [][] jtj,
"jtj_pivoted.tiff"
);
// String title)
showPivot
(
jtj_full
,
// double [][] jtj,
"jtj_full.tiff"
);
// String title)
return
jtj_full
;
// old version
return
jtj_full
;
// old version
}
}
...
@@ -1623,7 +1653,7 @@ public class VegetationLMA {
...
@@ -1623,7 +1653,7 @@ public class VegetationLMA {
*/
*/
}
}
if
(
debugLevel
>
1
)
{
if
(
debugLevel
>
6
)
{
String
title
=
"setupElevationLMA.tiff"
;
String
title
=
"setupElevationLMA.tiff"
;
String
[]
titles
=
new
String
[
num_scenes
];
String
[]
titles
=
new
String
[
num_scenes
];
String
[]
top_titles
=
{
"sum_weights"
,
"contribs"
};
String
[]
top_titles
=
{
"sum_weights"
,
"contribs"
};
...
@@ -1694,6 +1724,170 @@ public class VegetationLMA {
...
@@ -1694,6 +1724,170 @@ public class VegetationLMA {
return
pivot
;
return
pivot
;
}
}
private
static
void
addRelatedFromSets
(
final
int
[][]
par_relations
,
final
int
par_index
,
final
int
par_number
,
final
ArrayList
<
HashSet
<
Integer
>>
sets
)
{
final
boolean
[][]
pivot
=
new
boolean
[
par_number
][
par_number
];
final
Thread
[]
threads
=
ImageDtt
.
newThreadArray
();
final
AtomicInteger
ai
=
new
AtomicInteger
(
0
);
for
(
int
ithread
=
0
;
ithread
<
threads
.
length
;
ithread
++)
{
threads
[
ithread
]
=
new
Thread
()
{
public
void
run
()
{
HashSet
<
Integer
>
this_par_set
=
new
HashSet
<
Integer
>();
//related parameters to nPar
for
(
int
nPar
=
ai
.
getAndIncrement
();
nPar
<
par_number
;
nPar
=
ai
.
getAndIncrement
()){
this_par_set
.
clear
();
int
full_par
=
nPar
+
par_index
;
for
(
HashSet
<
Integer
>
par_set:
sets
)
{
if
(
par_set
.
contains
(
full_par
))
{
this_par_set
.
addAll
(
par_set
);
}
}
if
(!
this_par_set
.
isEmpty
())
{
if
(
par_relations
[
full_par
]
!=
null
)
{
for
(
int
par:
par_relations
[
full_par
])
{
this_par_set
.
add
(
par
);
}
}
int
[]
pars_arr
=
this_par_set
.
stream
().
mapToInt
(
Integer:
:
intValue
).
toArray
();
Arrays
.
sort
(
pars_arr
);
// optional
par_relations
[
full_par
]
=
pars_arr
;
}
}
}
};
}
ImageDtt
.
startAndJoin
(
threads
);
return
;
}
private
static
int
[][]
addNeighborsLists
(
final
int
[][]
pars
,
final
int
[][]
par_neibs
)
{
final
int
[][]
pars_with_neibs
=
new
int
[
pars
.
length
][];
if
(
pars
.
length
!=
par_neibs
.
length
)
{
throw
new
IllegalArgumentException
(
"addNeighborsLists(): pars.length="
+
pars
.
length
+
" != par_neibs.length ="
+
par_neibs
.
length
);
}
final
Thread
[]
threads
=
ImageDtt
.
newThreadArray
();
final
AtomicInteger
ai
=
new
AtomicInteger
(
0
);
for
(
int
ithread
=
0
;
ithread
<
threads
.
length
;
ithread
++)
{
threads
[
ithread
]
=
new
Thread
()
{
public
void
run
()
{
HashSet
<
Integer
>
this_par_set
=
new
HashSet
<
Integer
>();
//related parameters to nPar
for
(
int
nPar
=
ai
.
getAndIncrement
();
nPar
<
par_neibs
.
length
;
nPar
=
ai
.
getAndIncrement
()){
if
((
nPar
==
84
)
||
(
nPar
==
1373
))
{
System
.
out
.
println
(
"addNeighborsLists() nPar="
+
nPar
);
}
if
((
pars
[
nPar
]
!=
null
)
&&
(
pars
[
nPar
].
length
>
0
))
{
this_par_set
.
clear
();
for
(
int
other_par:
pars
[
nPar
])
{
this_par_set
.
add
(
other_par
);
if
((
par_neibs
[
other_par
]
!=
null
)
&&
(
par_neibs
[
other_par
].
length
>
0
))
{
int
[]
neib_pars
=
par_neibs
[
other_par
];
// should all be >= 0
for
(
int
par:
neib_pars
)
{
this_par_set
.
add
(
par
);
}
}
}
if
(!
this_par_set
.
isEmpty
())
{
// anything to add - probably always
//https://stackoverflow.com/questions/2451184/how-can-i-convert-a-java-hashsetinteger-to-a-primitive-int-array
int
[]
pars_arr
=
this_par_set
.
stream
().
mapToInt
(
Integer:
:
intValue
).
toArray
();
Arrays
.
sort
(
pars_arr
);
// optional
pars_with_neibs
[
nPar
]
=
pars_arr
;
}
}
}
}
};
}
ImageDtt
.
startAndJoin
(
threads
);
return
pars_with_neibs
;
}
private
static
int
[][]
addNeighborsLists_wrong
(
final
int
[][]
pars
,
final
int
[][]
par_neibs
)
{
final
int
[][]
pars_with_neibs
=
new
int
[
pars
.
length
][];
if
(
pars
.
length
!=
par_neibs
.
length
)
{
throw
new
IllegalArgumentException
(
"addNeighborsLists(): pars.length="
+
pars
.
length
+
" != par_neibs.length ="
+
par_neibs
.
length
);
}
final
Thread
[]
threads
=
ImageDtt
.
newThreadArray
();
final
AtomicInteger
ai
=
new
AtomicInteger
(
0
);
for
(
int
ithread
=
0
;
ithread
<
threads
.
length
;
ithread
++)
{
threads
[
ithread
]
=
new
Thread
()
{
public
void
run
()
{
HashSet
<
Integer
>
this_par_set
=
new
HashSet
<
Integer
>();
//related parameters to nPar
for
(
int
nPar
=
ai
.
getAndIncrement
();
nPar
<
par_neibs
.
length
;
nPar
=
ai
.
getAndIncrement
()){
if
((
nPar
==
84
)
||
(
nPar
==
1373
))
{
System
.
out
.
println
(
"addNeighborsLists() nPar="
+
nPar
);
}
if
((
par_neibs
[
nPar
]
!=
null
)
&&
(
par_neibs
[
nPar
].
length
>
0
))
{
this_par_set
.
clear
();
for
(
int
neib_par:
par_neibs
[
nPar
])
{
int
[]
neib_pars
=
pars
[
neib_par
];
// should all be >= 0
if
(
neib_pars
!=
null
)
{
for
(
int
par:
neib_pars
)
{
this_par_set
.
add
(
par
);
}
}
}
if
(!
this_par_set
.
isEmpty
())
{
// anything to add - probably always
if
(
pars
[
nPar
]
!=
null
)
{
// adding it's own parameters
for
(
int
par:
pars
[
nPar
])
{
this_par_set
.
add
(
par
);
}
}
//https://stackoverflow.com/questions/2451184/how-can-i-convert-a-java-hashsetinteger-to-a-primitive-int-array
int
[]
pars_arr
=
this_par_set
.
stream
().
mapToInt
(
Integer:
:
intValue
).
toArray
();
Arrays
.
sort
(
pars_arr
);
// optional
pars_with_neibs
[
nPar
]
=
pars_arr
;
}
}
}
}
};
}
ImageDtt
.
startAndJoin
(
threads
);
return
pars_with_neibs
;
}
public
static
boolean
[][]
listsToPivot
(
final
int
[][]
par_lists
){
final
boolean
[][]
pivot
=
new
boolean
[
par_lists
.
length
][
par_lists
.
length
];
final
Thread
[]
threads
=
ImageDtt
.
newThreadArray
();
final
AtomicInteger
ai
=
new
AtomicInteger
(
0
);
for
(
int
ithread
=
0
;
ithread
<
threads
.
length
;
ithread
++)
{
threads
[
ithread
]
=
new
Thread
()
{
public
void
run
()
{
for
(
int
nPar
=
ai
.
getAndIncrement
();
nPar
<
par_lists
.
length
;
nPar
=
ai
.
getAndIncrement
()){
pivot
[
nPar
][
nPar
]
=
true
;
int
[]
pars
=
par_lists
[
nPar
];
if
(
pars
!=
null
)
{
for
(
int
par
:
pars
)
if
(
par
>
nPar
){
pivot
[
nPar
][
par
]
=
true
;
pivot
[
par
][
nPar
]
=
true
;
}
}
}
}
};
}
ImageDtt
.
startAndJoin
(
threads
);
return
pivot
;
}
/**
/**
* Add neighbors of parameters to the pivot (square symmetrical) array of the dependent
* Add neighbors of parameters to the pivot (square symmetrical) array of the dependent
* parameters to improve performance of JtJ calculation.
* parameters to improve performance of JtJ calculation.
...
@@ -1863,7 +2057,7 @@ public class VegetationLMA {
...
@@ -1863,7 +2057,7 @@ public class VegetationLMA {
final
int
dbg_n
=
-
7402
;
// -69992;
final
int
dbg_n
=
-
7402
;
// -69992;
final
int
fdbg_scene
=
dbg_scene
;
final
int
fdbg_scene
=
dbg_scene
;
final
int
dbg_cols
=
2
,
dbg_rows
=
2
;
final
int
dbg_cols
=
2
,
dbg_rows
=
2
;
final
int
dbg_windex
=
-
1
;
for
(
int
ithread
=
0
;
ithread
<
threads
.
length
;
ithread
++)
{
for
(
int
ithread
=
0
;
ithread
<
threads
.
length
;
ithread
++)
{
threads
[
ithread
]
=
new
Thread
()
{
threads
[
ithread
]
=
new
Thread
()
{
...
@@ -1965,6 +2159,9 @@ public class VegetationLMA {
...
@@ -1965,6 +2159,9 @@ public class VegetationLMA {
}
// for (int wvindex = 0; wvindex < woi_veg_length; wvindex++) if (valid_vegetation[wvindex]) {
}
// for (int wvindex = 0; wvindex < woi_veg_length; wvindex++) if (valid_vegetation[wvindex]) {
for
(
int
windx
=
0
;
windx
<
woi_length
;
windx
++)
if
(
valid_terrain
[
windx
])
{
for
(
int
windx
=
0
;
windx
<
woi_length
;
windx
++)
if
(
valid_terrain
[
windx
])
{
if
(
windx
==
dbg_windex
)
{
System
.
out
.
println
(
"getFxDerivs() nScene="
+
nScene
+
", windx="
+
windx
);
}
int
wx
=
windx
%
woi
.
width
;
// relative to woi
int
wx
=
windx
%
woi
.
width
;
// relative to woi
int
wy
=
windx
/
woi
.
width
;
// relative to woi
int
wy
=
windx
/
woi
.
width
;
// relative to woi
int
x
=
wx
+
woi
.
x
;
// relative to full
int
x
=
wx
+
woi
.
x
;
// relative to full
...
@@ -2208,7 +2405,7 @@ public class VegetationLMA {
...
@@ -2208,7 +2405,7 @@ public class VegetationLMA {
// re-consolidate contributors
// re-consolidate contributors
if
(
need_derivs
)
{
if
(
need_derivs
)
{
for
(
int
i
=
0
;
i
<
woi_length
;
i
++)
{
for
(
int
i
=
0
;
i
<
woi_length
;
i
++)
{
contrib_combo
.
get
(
i
).
clear
();
// erase sets; add(new HashSet<Integer>());
//
contrib_combo.get(i).clear(); // erase sets; add(new HashSet<Integer>());
}
}
ai
.
set
(
0
);
ai
.
set
(
0
);
for
(
int
ithread
=
0
;
ithread
<
threads
.
length
;
ithread
++)
{
for
(
int
ithread
=
0
;
ithread
<
threads
.
length
;
ithread
++)
{
...
@@ -2477,7 +2674,8 @@ public class VegetationLMA {
...
@@ -2477,7 +2674,8 @@ public class VegetationLMA {
fX
[
nx
]
+=
terr_lpf
*
(
vector
[
np
]
-
avg
);
// + terr_pull0 * (vector[np]);
fX
[
nx
]
+=
terr_lpf
*
(
vector
[
np
]
-
avg
);
// + terr_pull0 * (vector[np]);
if
(
terr_pull0
>
0
)
{
if
(
terr_pull0
>
0
)
{
// int findx = data_source[np][DATA_SOURCE_HEAD][DATA_SOURCE_HEAD_FINDEX];
// int findx = data_source[np][DATA_SOURCE_HEAD][DATA_SOURCE_HEAD_FINDEX];
int
findx
=
y_src
[
np
][
YSRC_FINDEX
];
// int findx = y_src[np][YSRC_FINDEX];
int
findx
=
par_rindex
[
np
][
1
];
double
terr_pull
=
terrain_average
[
findx
];
// maybe use tvao[TVAO_TERRAIN][findx] ?
double
terr_pull
=
terrain_average
[
findx
];
// maybe use tvao[TVAO_TERRAIN][findx] ?
if
(
Double
.
isNaN
(
terr_pull
))
{
if
(
Double
.
isNaN
(
terr_pull
))
{
terr_pull
=
0
;
// should not happen
terr_pull
=
0
;
// should not happen
...
@@ -2532,8 +2730,8 @@ public class VegetationLMA {
...
@@ -2532,8 +2730,8 @@ public class VegetationLMA {
fX
[
nx
]
+=
veget_lpf
*
(
vector
[
np
]
-
avg
);
// + veget_pull0 * vector[np];
fX
[
nx
]
+=
veget_lpf
*
(
vector
[
np
]
-
avg
);
// + veget_pull0 * vector[np];
if
(
veget_pull0
>
0
)
{
if
(
veget_pull0
>
0
)
{
// int findx = data_source[np][DATA_SOURCE_HEAD][DATA_SOURCE_HEAD_FINDEX];
// int findx = data_source[np][DATA_SOURCE_HEAD][DATA_SOURCE_HEAD_FINDEX];
int
findx
=
y_src
[
np
][
YSRC_FINDEX
];
//
int findx = y_src[np][YSRC_FINDEX];
int
findx
=
par_rindex
[
np
][
1
];
double
veget_pull
=
vegetation_pull
[
findx
];
// maybe use tvao[TVAO_TERRAIN][findx] ?
double
veget_pull
=
vegetation_pull
[
findx
];
// maybe use tvao[TVAO_TERRAIN][findx] ?
if
(
Double
.
isNaN
(
veget_pull
))
{
if
(
Double
.
isNaN
(
veget_pull
))
{
veget_pull
=
0
;
// should not happen unless too far from the vegetation
veget_pull
=
0
;
// should not happen unless too far from the vegetation
...
@@ -2603,13 +2801,13 @@ public class VegetationLMA {
...
@@ -2603,13 +2801,13 @@ public class VegetationLMA {
fX
[
nx
]
+=
elevation_lpf
*
(
vector
[
np
]
-
avg
);
// + veget_pull0 * vector[np];
fX
[
nx
]
+=
elevation_lpf
*
(
vector
[
np
]
-
avg
);
// + veget_pull0 * vector[np];
if
(
elevation_pull0
>
0
)
{
if
(
elevation_pull0
>
0
)
{
// int findx = data_source[np][DATA_SOURCE_HEAD][DATA_SOURCE_HEAD_FINDEX];
// int findx = data_source[np][DATA_SOURCE_HEAD][DATA_SOURCE_HEAD_FINDEX];
int
findx
=
y_src
[
np
][
YSRC_FINDEX
];
//
int findx = y_src[np][YSRC_FINDEX];
int
findx
=
par_rindex
[
np
][
1
];
double
elevation_pull
=
tvao
[
TVAO_ELEVATION
][
findx
];
// vegetation_pull[findx]; // maybe use tvao[TVAO_TERRAIN][findx] ?
double
elevation_pull
=
tvao
[
TVAO_ELEVATION
][
findx
];
// vegetation_pull[findx]; // maybe use tvao[TVAO_TERRAIN][findx] ?
if
(
Double
.
isNaN
(
elevation_pull
))
{
if
(
Double
.
isNaN
(
elevation_pull
))
{
elevation_pull
=
0
;
// should not happen unless too far from the vegetation
elevation_pull
=
0
;
// should not happen unless too far from the vegetation
}
}
fX
[
nx
]
+=
veget
_pull0
*
(
vector
[
np
]
-
elevation_pull
);
fX
[
nx
]
+=
elevation
_pull0
*
(
vector
[
np
]
-
elevation_pull
);
}
}
if
(
jt
!=
null
)
{
if
(
jt
!=
null
)
{
...
@@ -2628,7 +2826,7 @@ public class VegetationLMA {
...
@@ -2628,7 +2826,7 @@ public class VegetationLMA {
}
}
ImageDtt
.
startAndJoin
(
threads
);
ImageDtt
.
startAndJoin
(
threads
);
}
}
/*
// create pivot table
// create pivot table
if (need_derivs) {
if (need_derivs) {
param_pivot =getPivotFromSets(
param_pivot =getPivotFromSets(
...
@@ -2666,16 +2864,37 @@ public class VegetationLMA {
...
@@ -2666,16 +2864,37 @@ public class VegetationLMA {
if (debug_level>4) showPivot(param_pivot, "pivot-elevation");
if (debug_level>4) showPivot(param_pivot, "pivot-elevation");
}
}
}
}
return
fX
;
*/
// create pivot table
if
(
need_derivs
)
{
param_lists
=
new
int
[
par_rindex
.
length
][];
addRelatedFromSets
(
param_lists
,
// final int [][] par_relations,
0
,
// final int par_index,
param_lists
.
length
,
// final int par_number,
contrib_combo
);
// final ArrayList<HashSet<Integer>> sets) {
if
(
debug_level
>
4
)
showPivot
(
param_lists
,
"pivot-params-from-sets"
);
param_lists
=
addNeighborsLists
(
param_lists
,
// final int [][] pars,
neibs_neibs
);
// final int [][] par_neibs)
if
(
debug_level
>
4
)
showPivot
(
param_lists
,
"pivot-params-with-neibs"
);
param_pivot
=
listsToPivot
(
param_lists
);
// final int [][] par_lists)
if
(
debug_level
>
4
)
showPivot
(
param_pivot
,
"pivot-all"
);
}
return
fX
;
}
}
public
static
void
showPivot
(
public
static
void
showPivot
(
boolean
[][]
pivot
,
boolean
[][]
pivot
,
String
title
)
{
String
title
)
{
int
size
=
pivot
.
length
;
int
size
=
pivot
.
length
;
if
(
pivot
[
0
].
length
!=
size
)
{
if
(
pivot
[
0
].
length
!=
size
)
{
throw
new
IllegalArgumentException
(
"showPivot(): Expecting square array, got "
+
throw
new
IllegalArgumentException
(
"showPivot(): Expecting square
boolean
array, got "
+
pivot
[
0
].
length
+
"x"
+
size
+
"."
);
pivot
[
0
].
length
+
"x"
+
size
+
"."
);
}
}
...
@@ -2686,574 +2905,164 @@ public class VegetationLMA {
...
@@ -2686,574 +2905,164 @@ public class VegetationLMA {
dbg_img
[
i
*
size
+
j
]
=
pivot
[
i
][
j
]?
1.0
:
0.0
;
dbg_img
[
i
*
size
+
j
]
=
pivot
[
i
][
j
]?
1.0
:
0.0
;
}
}
}
}
ShowDoubleFloatArrays
.
showArrays
(
int
num_diff
=
0
;
dbg_img
,
int
num_non0
=
0
;
title
);
for
(
int
i
=
0
;
i
<
size
;
i
++)
{
for
(
int
j
=
i
+
1
;
j
<
size
;
j
++)
{
if
(
dbg_img
[
i
*
size
+
j
]
!=
0
)
{
num_non0
++;
}
}
if
(
dbg_img
[
i
*
size
+
j
]
!=
dbg_img
[
j
*
size
+
i
])
{
num_diff
++;
private
double
[]
getFxDerivs_old
(
final
double
[]
vector
,
final
double
[][]
jt
,
// should be null or initialized with [vector.length][]
final
int
debug_level
)
{
setupElevationLMA
(
// return pivot?
vector
,
// final double [] vector,
(
jt
!=
null
),
// final boolean calc_derivatives,
debug_level
);
// final int debugLevel) {
final
boolean
use_hf
=
y_vector
.
length
>
data_source
.
length
;
// twice longer
// using 0.5*(1-cos(alpha/2pi) instead of alpha. alpha < 0 -> 0, alpha > 1 -> 1. Depends on other terms for stability
double
[]
fX
=
new
double
[
weights
.
length
];
// num_pairs + vector.length];
if
(
jt
!=
null
)
{
for
(
int
i
=
0
;
i
<
jt
.
length
;
i
++)
{
jt
[
i
]
=
new
double
[
weights
.
length
];
// weights.length];
}
}
}
}
int
dbg_scene
=
-
20
;
final
int
dbg_n
=
-
7402
;
// -69992;
final
int
fdbg_scene
=
dbg_scene
;
final
int
dbg_cols
=
2
,
dbg_rows
=
2
;
final
Thread
[]
threads
=
ImageDtt
.
newThreadArray
();
final
AtomicInteger
ai
=
new
AtomicInteger
(
0
);
for
(
int
ithread
=
0
;
ithread
<
threads
.
length
;
ithread
++)
{
threads
[
ithread
]
=
new
Thread
()
{
public
void
run
()
{
double
[]
vegetation
=
new
double
[
4
];
double
[]
alpha
=
new
double
[
4
];
for
(
int
n
=
ai
.
getAndIncrement
();
n
<
data_source
.
length
;
n
=
ai
.
getAndIncrement
())
{
// low-frequency
if
(
n
==
dbg_n
)
{
System
.
out
.
println
(
"getFxDerivs(): n="
+
n
);
}
}
int
indx
=
data_source
[
n
][
DATA_SOURCE_HEAD
][
DATA_SOURCE_HEAD_FINDEX
];
if
(
num_diff
!=
0
)
{
int
tindx
=
data_source
[
n
][
DATA_SOURCE_HEAD
][
DATA_SOURCE_HEAD_TINDEX
];
System
.
out
.
println
(
"showPivot(boolean[][]): not-symmetrical, number of mismatches is "
+
num_diff
);
int
scene
=
data_source
[
n
][
DATA_SOURCE_HEAD
][
DATA_SOURCE_HEAD_SCENE
];
if
(
scene
==
fdbg_scene
)
{
int
drow
=
indx
/
full
.
width
-
woi
.
y
;
int
dcol
=
indx
%
full
.
width
-
woi
.
x
;
if
((
dcol
<
dbg_cols
)
&&
(
drow
<
dbg_rows
))
{
System
.
out
.
println
(
"getFxDerivs(): scene="
+
scene
+
", drow="
+
drow
+
" dcol="
+
dcol
);
}
}
System
.
out
.
println
(
"showPivot(boolean[][]): num non-zero "
+
num_non0
+
" ("
+(
100.0
*
num_non0
/
size
/
size
)+
"%)"
);
ShowDoubleFloatArrays
.
showArrays
(
dbg_img
,
title
);
}
}
double
terrain
=
(
tindx
>=
0
)
?
vector
[
tindx
]
:
tvao
[
TVAO_TERRAIN
][
indx
];
public
static
void
showPivot
(
double
[]
cw
=
corners_weights
[
n
];
double
[][]
jtj
,
double
d
;
String
title
)
{
int
[]
indx_vegetation
=
data_source
[
n
][
DATA_SOURCE_CORN_VEGET
];
int
size
=
jtj
.
length
;
int
[]
indx_alpha
=
data_source
[
n
][
DATA_SOURCE_CORN_ALPHA
];
if
(
jtj
[
0
].
length
!=
size
)
{
double
sum_v
=
0
,
sum_a
=
0
,
sum_vw
=
0
,
sum_aw
=
0
;
throw
new
IllegalArgumentException
(
"showPivot(): Expecting square double array, got "
+
if
(
cw
!=
null
)
{
jtj
[
0
].
length
+
"x"
+
size
+
"."
);
for
(
int
i
=
0
;
i
<
4
;
i
++
)
{
int
iv
=
indx_vegetation
[
i
],
ia
=
indx_alpha
[
i
];
/*
double v = (iv >= 0) ? vector[iv]: tvao[TVAO_VEGETATION][-1-iv];
double a = (ia >= 0) ? vector[ia]: tvao[TVAO_ALPHA][-1-ia];
if (!Double.isNaN(v)) {
sum_vw += cw[i];
sum_v +=cw[i] * v;
}
if (!Double.isNaN(a)) {
sum_aw += cw[i];
sum_a +=cw[i] * a;
}
*/
vegetation
[
i
]
=
cw
[
i
]
*
((
iv
>=
0
)
?
vector
[
iv
]:
tvao
[
TVAO_VEGETATION
][-
1
-
iv
]);
alpha
[
i
]
=
cw
[
i
]
*
((
ia
>=
0
)
?
vector
[
ia
]:
tvao
[
TVAO_ALPHA
][-
1
-
ia
]);
sum_v
+=
vegetation
[
i
];
sum_a
+=
alpha
[
i
];
}
}
double
[]
dbg_img
=
new
double
[
size
*
size
];
for
(
int
i
=
0
;
i
<
size
;
i
++)
{
for
(
int
j
=
0
;
j
<
size
;
j
++)
{
dbg_img
[
i
*
size
+
j
]
=
(
jtj
[
i
][
j
]!=
0
)
?
1.0
:
0.0
;
}
}
if
((
cw
!=
null
)
&&
!
Double
.
isNaN
(
sum_v
)
&&
!
Double
.
isNaN
(
sum_a
)){
double
k
;
if
(
alpha_piece_linear
)
{
k
=
(
sum_a
<
0
)?
0
:
((
sum_a
>
1
)
?
1.0
:
sum_a
);
}
else
{
k
=
(
sum_a
<
0
)?
0
:
((
sum_a
>
1
)
?
1.0
:
0.5
*
(
1.0
-
Math
.
cos
(
sum_a
*
Math
.
PI
)));
}
}
d
=
terrain
*
(
1.0
-
k
)
+
sum_v
*
k
;
int
num_diff
=
0
;
int
num_non0
=
0
;
if
(
jt
!=
null
)
{
for
(
int
i
=
0
;
i
<
size
;
i
++)
{
if
(
tindx
>=
0
)
{
for
(
int
j
=
i
+
1
;
j
<
size
;
j
++)
{
jt
[
tindx
][
n
]
=
1
-
k
;
// sum_a; // d/dterrain
if
(
dbg_img
[
i
*
size
+
j
]
!=
0
)
{
num_non0
++;
}
}
for
(
int
i
=
0
;
i
<
4
;
i
++
)
{
if
(
dbg_img
[
i
*
size
+
j
]
!=
dbg_img
[
j
*
size
+
i
])
{
if
((
indx_vegetation
!=
null
)
&&
(
indx_vegetation
[
i
]
>=
0
))
{
num_diff
++;
jt
[
data_source
[
n
][
DATA_SOURCE_CORN_VEGET
][
i
]][
n
]
=
cw
[
i
]
*
k
;
// sum_a; // d/dvegetation[i]
}
}
if
((
indx_alpha
!=
null
)
&&
(
indx_alpha
[
i
]
>=
0
)
&&
(
sum_a
>
0
)
&&
(
sum_a
<
1.0
))
{
if
(
alpha_piece_linear
)
{
jt
[
data_source
[
n
][
DATA_SOURCE_CORN_ALPHA
][
i
]][
n
]
=
cw
[
i
]
*
(
sum_v
-
terrain
);
// d/dalpha[i]
}
else
{
jt
[
data_source
[
n
][
DATA_SOURCE_CORN_ALPHA
][
i
]][
n
]
=
cw
[
i
]
*
(
sum_v
-
terrain
)
*
0.5
*
Math
.
PI
*
Math
.
sin
(
sum_a
*
Math
.
PI
);
// d/dalpha[i]
}
}
}
}
if
(
num_diff
!=
0
)
{
System
.
out
.
println
(
"showPivot(double[][]): not-symmetrical, number of mismatches is "
+
num_diff
);
}
}
System
.
out
.
println
(
"showPivot(double[][]): num non-zero "
+
num_non0
+
" ("
+(
100.0
*
num_non0
/
size
/
size
)+
"%)"
);
ShowDoubleFloatArrays
.
showArrays
(
dbg_img
,
title
);
}
}
}
else
{
d
=
terrain
;
if
((
jt
!=
null
)
&&(
tindx
>=
0
))
{
public
static
void
showPivot
(
jt
[
tindx
][
n
]
=
1
;
// sum_a; // d/dterrain
int
[][]
neibs_neibs
,
String
title
)
{
int
size
=
neibs_neibs
.
length
;
double
[]
dbg_img
=
new
double
[
size
*
size
];
for
(
int
i
=
0
;
i
<
size
;
i
++)
{
if
(
neibs_neibs
[
i
]
!=
null
)
{
for
(
int
j
:
neibs_neibs
[
i
])
{
dbg_img
[
i
*
size
+
j
]
=
1.0
;
}
}
}
}
if
(
data_source
[
n
][
DATA_SOURCE_HEAD
][
DATA_SOURCE_HEAD_SINDEX
]
>=
0
)
{
// can only be reference scene? use offset = 0 here.
double
scene_offs
=
vector
[
data_source
[
n
][
DATA_SOURCE_HEAD
][
DATA_SOURCE_HEAD_SINDEX
]];
d
+=
scene_offs
;
if
(
jt
!=
null
)
{
jt
[
data_source
[
n
][
DATA_SOURCE_HEAD
][
DATA_SOURCE_HEAD_SINDEX
]][
n
]
=
1
;
}
}
int
num_diff
=
0
;
int
num_non0
=
0
;
for
(
int
i
=
0
;
i
<
size
;
i
++)
{
for
(
int
j
=
i
+
1
;
j
<
size
;
j
++)
{
if
(
dbg_img
[
i
*
size
+
j
]
!=
0
)
{
num_non0
++;
}
}
fX
[
n
]
=
d
;
if
(
dbg_img
[
i
*
size
+
j
]
!=
dbg_img
[
j
*
size
+
i
])
{
if
(
Double
.
isNaN
(
fX
[
n
]))
{
num_diff
++;
System
.
out
.
println
(
"fX["
+
n
+
"]= NaN"
);
}
}
}
}
}
}
};
if
(
num_diff
!=
0
)
{
System
.
out
.
println
(
"showPivot(int[][]): not-symmetrical, number of mismatches is "
+
num_diff
);
}
}
ImageDtt
.
startAndJoin
(
threads
);
System
.
out
.
println
(
"showPivot(int[][]): num non-zero "
+
num_non0
+
" ("
+(
100.0
*
num_non0
/
size
/
size
)+
"%)"
);
if
(
use_hf
)
{
ShowDoubleFloatArrays
.
showArrays
(
final
int
ind_next
=
data_source
.
length
;
dbg_img
,
ai
.
set
(
0
);
title
);
for
(
int
ithread
=
0
;
ithread
<
threads
.
length
;
ithread
++)
{
threads
[
ithread
]
=
new
Thread
()
{
public
void
run
()
{
for
(
int
n_lf
=
ai
.
getAndIncrement
();
n_lf
<
data_source
.
length
;
n_lf
=
ai
.
getAndIncrement
())
{
if
(
n_lf
==
dbg_n
)
{
System
.
out
.
println
(
"getFxDerivs(): n_lf="
+
n_lf
);
}
int
n_hf
=
n_lf
+
ind_next
;
int
[]
neibs
=
data_source
[
n_lf
][
DATA_SOURCE_NEIB
];
if
(
neibs
!=
null
)
{
// nothing to do if null
double
d_lf
=
fX
[
n_lf
];
// center value
double
swd
=
0
,
sw
=
0
;
for
(
int
dir
=
0
;
dir
<
4
;
dir
++)
{
int
nindx
=
neibs
[
dir
];
if
(
nindx
>=
0
)
{
//see, maybe discard all border tiles
double
d_hf
=
fX
[
neibs
[
dir
]];
// neighbor value
swd
+=
d_hf
;
sw
+=
1
;
}
}
// if (sw > 0) { // nothing to do if 0
if
(
sw
>=
4
)
{
// try discarding all with partial neighbors
swd
/=
sw
;
fX
[
n_hf
]
=
d_lf
-
swd
;
if
(
jt
!=
null
)
{
// center derivative same as _lf with respect to all it depends on except scene offset
int
pindx
=
data_source
[
n_lf
][
DATA_SOURCE_HEAD
][
DATA_SOURCE_HEAD_TINDEX
];
if
(
pindx
>=
0
)
jt
[
pindx
][
n_hf
]
=
jt
[
pindx
][
n_lf
];
// d/dterrain always? not anymore
for
(
int
i
=
0
;
i
<
4
;
i
++
){
if
(
data_source
[
n_lf
][
DATA_SOURCE_CORN_VEGET
]
!=
null
)
{
pindx
=
data_source
[
n_lf
][
DATA_SOURCE_CORN_VEGET
][
i
];
if
(
pindx
>=
0
)
jt
[
pindx
][
n_hf
]
=
jt
[
pindx
][
n_lf
];
// d/dvegetation[i]
}
}
if
(
data_source
[
n_lf
][
DATA_SOURCE_CORN_ALPHA
]
!=
null
)
{
pindx
=
data_source
[
n_lf
][
DATA_SOURCE_CORN_ALPHA
][
i
];
public
static
int
checkSquare
(
if
(
pindx
>=
0
)
jt
[
pindx
][
n_hf
]
=
jt
[
pindx
][
n_lf
];
// d/dalpha[i]
int
[][]
neibs_neibs
,
String
title
)
{
int
size
=
neibs_neibs
.
length
;
double
[]
dbg_img
=
new
double
[
size
*
size
];
for
(
int
i
=
0
;
i
<
size
;
i
++)
{
if
(
neibs_neibs
[
i
]
!=
null
)
{
for
(
int
j
:
neibs_neibs
[
i
])
{
dbg_img
[
i
*
size
+
j
]
=
1.0
;
}
}
}
}
// scale/negate derivatives of all neighbors; -1.0/sw
for
(
int
neib:
neibs
)
if
(
neib
>=
0
)
{
// neib - index in data_source
pindx
=
data_source
[
neib
][
DATA_SOURCE_HEAD
][
DATA_SOURCE_HEAD_TINDEX
];
if
(
pindx
>=
0
)
jt
[
pindx
][
n_hf
]
-=
jt
[
pindx
][
neib
]/
sw
;
// d/dterrain always?
for
(
int
i
=
0
;
i
<
4
;
i
++
){
if
(
data_source
[
neib
][
DATA_SOURCE_CORN_VEGET
]
!=
null
)
{
pindx
=
data_source
[
neib
][
DATA_SOURCE_CORN_VEGET
][
i
];
if
(
pindx
>=
0
)
jt
[
pindx
][
n_hf
]
-=
jt
[
pindx
][
neib
]/
sw
;
// d/dvegetation[i]
}
}
if
(
data_source
[
neib
][
DATA_SOURCE_CORN_ALPHA
]
!=
null
)
{
int
num_diff
=
0
;
pindx
=
data_source
[
neib
][
DATA_SOURCE_CORN_ALPHA
][
i
];
for
(
int
i
=
0
;
i
<
size
;
i
++)
{
if
(
pindx
>=
0
)
jt
[
pindx
][
n_hf
]
-=
jt
[
pindx
][
neib
]/
sw
;
// d/dalpha[i]
for
(
int
j
=
i
+
1
;
j
<
size
;
j
++)
{
if
(
dbg_img
[
i
*
size
+
j
]
!=
dbg_img
[
j
*
size
+
i
])
{
num_diff
++;
}
}
}
}
}
}
return
num_diff
;
}
}
private
double
[][]
getFxDerivsDelta
(
double
[]
vector
,
final
double
delta
,
final
int
debug_level
)
{
int
dbg_n
=
-
2256
;
// 1698; // -2486;
double
[][]
jt
=
new
double
[
vector
.
length
][
weights
.
length
];
for
(
int
nv
=
0
;
nv
<
vector
.
length
;
nv
++)
{
if
(
nv
==
dbg_n
)
{
System
.
out
.
println
(
"getFxDerivsDelta(): nv="
+
nv
);
}
}
boolean
is_elevation
=
par_rindex
[
nv
][
0
]
==
TVAO_ELEVATION
;
// elev_sum_weights
double
[]
vpm
=
vector
.
clone
();
vpm
[
nv
]+=
0.5
*
delta
;
if
(
is_elevation
)
{
elev_sum_weights
=
null
;
// force re-calculation
}
}
double
[]
fx_p
=
getFxDerivs
(
vpm
,
null
,
// final double [][] jt, // should be null or initialized with [vector.length][]
debug_level
);
vpm
[
nv
]-=
delta
;
if
(
is_elevation
)
{
elev_sum_weights
=
null
;
// force re-calculation
}
}
double
[]
fx_m
=
getFxDerivs
(
vpm
,
null
,
// final double [][] jt, // should be null or initialized with [vector.length][]
debug_level
);
for
(
int
i
=
0
;
i
<
weights
.
length
;
i
++)
if
(
weights
[
i
]
>
0
)
{
jt
[
nv
][
i
]
=
(
fx_p
[
i
]-
fx_m
[
i
])/
delta
;
}
}
};
}
}
ImageDtt
.
startAndJoin
(
threads
);
return
jt
;
}
// regularization weights and derivatives
int
ind_next
=
y_vector
.
length
;
if
(
use_scenes_pull0
)
{
// single sample after differences
fX
[
ind_next
]
=
0.0
;
for
(
int
n
=
0
;
n
<
num_pars_scenes
;
n
++)
{
// only count adjustable scene offsets
int
np
=
ind_pars_scenes
+
n
;
// index of the alpha parameter
int
nscene
=
par_rindex
[
np
][
1
];
double
sw
=
scene_weights
[
nscene
]
*
scale_scenes_pull
;
fX
[
ind_next
]
+=
sw
*
vector
[
np
];
if
(
jt
!=
null
)
{
jt
[
np
][
ind_next
]
=
sw
;
}
}
ind_next
++;
}
// splitting alpha_lpf from alpha_loss+alpha_push
if
(
fits
[
TVAO_ALPHA
]
&&
((
alpha_loss
>
0
)
||
(
alpha_push
>
0
)))
{
int
dbg_nx
=
-
76340
;
final
int
ind_y_alpha_loss
=
ind_next
;
ind_next
+=
num_pars_alpha
;
ai
.
set
(
0
);
for
(
int
ithread
=
0
;
ithread
<
threads
.
length
;
ithread
++)
{
threads
[
ithread
]
=
new
Thread
()
{
public
void
run
()
{
for
(
int
n
=
ai
.
getAndIncrement
();
n
<
num_pars_alpha
;
n
=
ai
.
getAndIncrement
())
{
int
np
=
ind_pars_alpha
+
n
;
// index of the alpha parameter
int
nx
=
n
+
ind_y_alpha_loss
;
// y_vector.length; // x - index
if
(
nx
==
dbg_nx
)
{
System
.
out
.
println
(
"getFxDerivs(): n="
+
n
+
", nx="
+
nx
);
}
double
d
=
0
;
fX
[
nx
]
=
0.0
;
if
(
alpha_loss
>
0
)
{
double
alpha
=
vector
[
np
];
if
(
alpha
<
alpha_offset
)
{
d
=
alpha
-
alpha_offset
;
}
else
if
(
alpha
>
(
1
-
alpha_offset
))
{
d
=
alpha
-
(
1.0
-
alpha_offset
);
}
if
(
d
!=
0
)
{
fX
[
nx
]
=
d
*
d
*
alpha_loss
;
if
(
jt
!=
null
)
{
jt
[
np
][
nx
]
=
2
*
alpha_loss
*
d
;
// d/dalpha[i]
}
}
}
double
avg
=
0
;
int
nn
=
0
;
double
neib_min
=
Double
.
POSITIVE_INFINITY
,
neib_max
=
Double
.
NEGATIVE_INFINITY
;
for
(
int
i
=
0
;
i
<
alpha_neibs
[
n
].
length
;
i
++)
{
// now 4, may be increased
int
di
=
alpha_neibs
[
n
][
i
];
d
=
0
;
if
(
di
>=
0
)
{
d
=
vector
[
di
];
// d - full parameter index
avg
+=
d
;
if
(
d
<
neib_min
)
neib_min
=
d
;
if
(
d
>
neib_max
)
neib_max
=
d
;
nn
++;
}
else
if
(
di
<
-
1
)
{
d
=
tvao
[
TVAO_ALPHA
][-
di
-
2
];
avg
+=
d
;
if
(
d
<
neib_min
)
neib_min
=
d
;
if
(
d
>
neib_max
)
neib_max
=
d
;
nn
++;
}
}
avg
/=
nn
;
// average
if
(
alpha_push
>
0
)
{
/*
* p = alpha_push
* n = alpha_push_neutral
* a0 = p/(2 * n)
* a1 = p/(2 * (1 - n))
* f1(x) = p/(2 * n)* x * (1 - x/(2*n))
* f2(x) = p * (x - 2*n + 1) * (1 - x) / (4 * (1-n)*(1-n)
* df1(x)/dx = p/(2 * n^2 )*( n - x)
* df2(x)/dx = p* (n-x)/(2*(1-n)^2)
*/
// average including center:
double
sum_w
=
(
nn
+
alpha_push_center
);
double
avg_c
=
(
avg
*
nn
+
vector
[
np
]
*
alpha_push_center
)
/
sum_w
;
double
one_minus_n
=
1.0
-
alpha_push_neutral
;
double
one_minus_p
=
1.0
-
alpha_push
;
double
x_minus_p
=
avg_c
-
alpha_push
;
if
((
avg_c
>
0
)
||
(
avg_c
<
1.0
))
{
// fX[nx] += alpha_push * avg_c * (1.0 - avg_c);
if
(
avg_c
<=
alpha_push_neutral
)
{
fX
[
nx
]
+=
alpha_push
/
(
2
*
alpha_push_neutral
)
*
avg_c
*
(
1
-
avg_c
/
(
2
*
alpha_push_neutral
));
}
else
{
fX
[
nx
]
+=
alpha_push
*
(
avg_c
-
2
*
alpha_push_neutral
+
1
)
*
(
1
-
avg_c
)
/
(
4
*
one_minus_n
*
one_minus_n
);
}
if
(
jt
!=
null
)
{
// double dd = alpha_push / sum_w * (1 - 2 * avg_c);
double
dd
;
if
(
avg_c
<=
alpha_push_neutral
)
{
dd
=
alpha_push
/
(
sum_w
*
2
*
alpha_push_neutral
*
alpha_push_neutral
)
*
(
alpha_push_neutral
-
avg_c
);
}
else
{
dd
=
alpha_push
/
(
sum_w
*
2
*
one_minus_n
*
one_minus_n
)
*
(
alpha_push_neutral
-
avg_c
);
}
jt
[
np
][
nx
]
+=
dd
*
alpha_push_center
;
// 2 * alpha_loss * d; // d/dalpha[i]
for
(
int
i
=
0
;
i
<
alpha_neibs
[
n
].
length
;
i
++)
{
// now 4, may be increased
int
di
=
alpha_neibs
[
n
][
i
];
if
(
di
>=
0
)
{
jt
[
di
][
nx
]
+=
dd
;
// 2 * alpha_loss * d; // d/dalpha[i]
}
}
}
}
}
}
}
};
}
ImageDtt
.
startAndJoin
(
threads
);
}
// if ((alpha_loss > 0) || (alpha_push > 0)){
if
(
fits
[
TVAO_ALPHA
]
&&
(
alpha_lpf
>=
0
))
{
int
dbg_nx
=
-
76340
;
final
int
ind_y_alpha_lpf
=
ind_next
;
ind_next
+=
num_pars_alpha
;
ai
.
set
(
0
);
for
(
int
ithread
=
0
;
ithread
<
threads
.
length
;
ithread
++)
{
threads
[
ithread
]
=
new
Thread
()
{
public
void
run
()
{
for
(
int
n
=
ai
.
getAndIncrement
();
n
<
num_pars_alpha
;
n
=
ai
.
getAndIncrement
())
{
int
np
=
ind_pars_alpha
+
n
;
// index of the alpha parameter
int
nx
=
n
+
ind_y_alpha_lpf
;
// y_vector.length; // x - index
if
(
nx
==
dbg_nx
)
{
System
.
out
.
println
(
"getFxDerivs(): n="
+
n
+
", nx="
+
nx
);
}
double
d
=
0
;
fX
[
nx
]
=
0.0
;
double
avg
=
0
;
int
nn
=
0
;
double
neib_min
=
Double
.
POSITIVE_INFINITY
,
neib_max
=
Double
.
NEGATIVE_INFINITY
;
for
(
int
i
=
0
;
i
<
alpha_neibs
[
n
].
length
;
i
++)
{
// now 4, may be increased
int
di
=
alpha_neibs
[
n
][
i
];
d
=
0
;
if
(
di
>=
0
)
{
d
=
vector
[
di
];
// d - full parameter index
avg
+=
d
;
if
(
d
<
neib_min
)
neib_min
=
d
;
if
(
d
>
neib_max
)
neib_max
=
d
;
nn
++;
}
else
if
(
di
<
-
1
)
{
d
=
tvao
[
TVAO_ALPHA
][-
di
-
2
];
avg
+=
d
;
if
(
d
<
neib_min
)
neib_min
=
d
;
if
(
d
>
neib_max
)
neib_max
=
d
;
nn
++;
}
}
avg
/=
nn
;
// average
// add cost for difference between this alpha and average of 4 neighbors (when they exist
// applies to alpha before cosine, so it will pull borders even when alpha<0 or alpha > 1 (zero derivatives)
//alpha_scale_avg
double
mm
=
neib_min
+
(
neib_max
-
neib_min
)
*
alpha_mm_hole
;
double
effective_alpha_lpf
=
alpha_lpf
;
if
(
alpha_en_holes
&&
!
Double
.
isNaN
(
alpha_mm_hole
)
&&
(
vector
[
np
]
<=
mm
)
&&
((
neib_max
-
neib_min
)
>=
alpha_diff_hole
))
{
effective_alpha_lpf
=
0.0
;
// disable alpha_lpf
}
// disable pull to average of neighbors for small holes in vegetation (local "almost minimum")
/// if (Double.isNaN(alpha_mm_hole) || (vector[np] > mm) || ((neib_max-neib_min) < alpha_diff_hole)) {
// this.alpha_mm_hole = alpha_mm_hole;
if
(
alpha_scale_avg
==
1
)
{
fX
[
nx
]
=
effective_alpha_lpf
*
(
vector
[
np
]
-
avg
);
if
(
jt
!=
null
)
{
jt
[
np
][
nx
]
+=
effective_alpha_lpf
;
for
(
int
i
=
0
;
i
<
alpha_neibs
[
n
].
length
;
i
++)
{
// now 4, may be increased
int
di
=
alpha_neibs
[
n
][
i
];
if
(
di
>=
0
)
{
jt
[
di
][
nx
]
-=
effective_alpha_lpf
/
nn
;
}
}
}
}
else
{
if
((
avg
>
0
)
&&
(
avg
<
1
))
{
double
savg
=
0.5
+
(
avg
-
0.5
)
*
alpha_scale_avg
;
fX
[
nx
]
=
effective_alpha_lpf
*
(
vector
[
np
]
-
savg
);
if
(
jt
!=
null
)
{
// jt[np][nx] += effective_alpha_lpf;
for
(
int
i
=
0
;
i
<
alpha_neibs
[
n
].
length
;
i
++)
{
// now 4, may be increased
int
di
=
alpha_neibs
[
n
][
i
];
if
(
di
>=
0
)
{
jt
[
di
][
nx
]
-=
effective_alpha_lpf
/
nn
*
alpha_scale_avg
;
}
}
}
}
else
{
if
(
avg
<=
0
)
{
fX
[
nx
]
=
effective_alpha_lpf
*
(
vector
[
np
]
+
0.5
*(
alpha_scale_avg
-
1.0
));
}
else
{
// avg > 1
fX
[
nx
]
=
effective_alpha_lpf
*
(
vector
[
np
]
-
0.5
*(
alpha_scale_avg
+
1.0
));
}
}
if
(
jt
!=
null
)
{
jt
[
np
][
nx
]
+=
effective_alpha_lpf
;
}
}
}
}
};
}
ImageDtt
.
startAndJoin
(
threads
);
}
// if (alpha_lpf >= 0) {
if
(
fits
[
TVAO_TERRAIN
]
&&
(
terr_lpf
>=
0
))
{
final
int
ind_y_terr
=
ind_next
;
ind_next
+=
num_pars_terrain
;
ai
.
set
(
0
);
for
(
int
ithread
=
0
;
ithread
<
threads
.
length
;
ithread
++)
{
threads
[
ithread
]
=
new
Thread
()
{
public
void
run
()
{
for
(
int
n
=
ai
.
getAndIncrement
();
n
<
num_pars_terrain
;
n
=
ai
.
getAndIncrement
())
{
int
np
=
ind_pars_terrain
+
n
;
// index of the terrain parameter
int
nx
=
n
+
ind_y_terr
;
// y_vector.length; // x - index
double
d
=
0
;
if
(
terr_lpf
>
0
)
{
double
avg
=
0
;
int
nn
=
0
;
for
(
int
i
=
0
;
i
<
terr_neibs
[
n
].
length
;
i
++)
{
// now 4, may be increased
int
di
=
terr_neibs
[
n
][
i
];
d
=
0
;
if
(
di
>=
0
)
{
d
=
vector
[
di
];
// d - full parameter index
avg
+=
d
;
nn
++;
}
else
if
(
di
<
-
1
)
{
d
=
tvao
[
TVAO_TERRAIN
][-
di
-
2
];
avg
+=
d
;
nn
++;
}
}
avg
/=
nn
;
// average
fX
[
nx
]
+=
terr_lpf
*
(
vector
[
np
]
-
avg
);
// + terr_pull0 * (vector[np]);
if
(
terr_pull0
>
0
)
{
int
findx
=
data_source
[
np
][
DATA_SOURCE_HEAD
][
DATA_SOURCE_HEAD_FINDEX
];
double
terr_pull
=
terrain_average
[
findx
];
// maybe use tvao[TVAO_TERRAIN][findx] ?
if
(
Double
.
isNaN
(
terr_pull
))
{
terr_pull
=
0
;
// should not happen
}
fX
[
nx
]
+=
terr_pull0
*
(
vector
[
np
]
-
terr_pull
);
}
if
(
jt
!=
null
)
{
jt
[
np
][
nx
]
+=
terr_lpf
+
terr_pull0
;
for
(
int
i
=
0
;
i
<
terr_neibs
[
n
].
length
;
i
++)
{
// now 4, may be increased
int
di
=
terr_neibs
[
n
][
i
];
if
(
di
>=
0
)
{
jt
[
di
][
nx
]
-=
terr_lpf
/
nn
;
}
}
}
}
}
}
};
}
ImageDtt
.
startAndJoin
(
threads
);
}
if
(
fits
[
TVAO_VEGETATION
]
&&
(
veget_lpf
>=
0
))
{
// should be positive for pull0 and terr_pull_cold (difference between vegetation and terrain)
final
int
ind_y_veget
=
ind_next
;
ind_next
+=
num_pars_vegetation
;
ai
.
set
(
0
);
for
(
int
ithread
=
0
;
ithread
<
threads
.
length
;
ithread
++)
{
threads
[
ithread
]
=
new
Thread
()
{
public
void
run
()
{
for
(
int
n
=
ai
.
getAndIncrement
();
n
<
num_pars_vegetation
;
n
=
ai
.
getAndIncrement
())
{
int
np
=
ind_pars_vegetation
+
n
;
// index of the alpha parameter
int
nx
=
n
+
ind_y_veget
;
// y_vector.length; // x - index
double
d
=
0
;
if
(
veget_lpf
>
0
)
{
double
avg
=
0
;
int
nn
=
0
;
for
(
int
i
=
0
;
i
<
veget_neibs
[
n
].
length
;
i
++)
{
// now 4, may be increased
int
di
=
veget_neibs
[
n
][
i
];
d
=
0
;
if
(
di
>=
0
)
{
d
=
vector
[
di
];
// d - full parameter index
avg
+=
d
;
nn
++;
}
else
if
(
di
<
-
1
)
{
d
=
tvao
[
TVAO_VEGETATION
][-
di
-
2
];
avg
+=
d
;
nn
++;
}
}
avg
/=
nn
;
// average
fX
[
nx
]
+=
veget_lpf
*
(
vector
[
np
]
-
avg
);
// + veget_pull0 * vector[np];
if
(
veget_pull0
>
0
)
{
int
findx
=
data_source
[
np
][
DATA_SOURCE_HEAD
][
DATA_SOURCE_HEAD_FINDEX
];
double
veget_pull
=
vegetation_pull
[
findx
];
// maybe use tvao[TVAO_TERRAIN][findx] ?
if
(
Double
.
isNaN
(
veget_pull
))
{
veget_pull
=
0
;
// should not happen unless too far from the vegetation
}
fX
[
nx
]
+=
veget_pull0
*
(
vector
[
np
]
-
veget_pull
);
}
if
(
jt
!=
null
)
{
jt
[
np
][
nx
]
+=
veget_lpf
+
veget_pull0
;
for
(
int
i
=
0
;
i
<
veget_neibs
[
n
].
length
;
i
++)
{
// now 4, may be increased
int
di
=
veget_neibs
[
n
][
i
];
if
(
di
>=
0
)
{
jt
[
di
][
nx
]
-=
veget_lpf
/
nn
;
}
}
}
if
(
terr_pull_cold
>
0
)
{
// find corresponding terrain
int
findx
=
par_rindex
[
np
][
1
];
int
tindx
=
par_index
[
TVAO_TERRAIN
][
findx
];
if
(
tindx
>=
0
)
{
// only if both terrain and vegetation exist for this pixel
int
ntp
=
ind_pars_terrain
+
tindx
;
// index in vector[]
fX
[
nx
]
+=
terr_pull_cold
*
(
vector
[
np
]
-
vector
[
ntp
]
-
terr_difference
);
if
(
jt
!=
null
)
{
jt
[
np
][
nx
]
+=
terr_pull_cold
;
jt
[
ntp
][
nx
]
-=
terr_pull_cold
;
}
}
}
}
}
}
};
}
ImageDtt
.
startAndJoin
(
threads
);
}
return
fX
;
}
/**
 * Numerically estimates the Jacobian transposed (same layout as the analytic
 * {@code jt} filled by getFxDerivs()) by central differences: each parameter
 * is perturbed by +/- delta/2 and the two fX vectors are differenced.
 * Derivatives are only stored for samples with positive weight.
 *
 * @param vector      current parameters vector (not modified; cloned per parameter)
 * @param delta       full step of the central difference (half-step each way)
 * @param debug_level verbosity passed through to getFxDerivs()
 * @return [vector.length][weights.length] array of d(fX[i])/d(vector[nv])
 */
private double [][] getFxDerivsDelta(
		double []     vector,
		final double  delta,
		final int     debug_level) {
	int dbg_n = -2256; // 1698; // -2486;
	double [][] jt = new double [vector.length][weights.length];
	for (int ipar = 0; ipar < vector.length; ipar++) {
		if (ipar == dbg_n) {
			System.out.println("getFxDerivsDelta(): nv="+ipar);
		}
		// Elevation parameters feed cached per-pixel weight sums; the cache
		// must be invalidated before each perturbed evaluation.
		boolean touches_elevation = (par_rindex[ipar][0] == TVAO_ELEVATION); // elev_sum_weights
		double [] probe = vector.clone();
		probe[ipar] += 0.5 * delta; // forward half-step
		if (touches_elevation) {
			elev_sum_weights = null; // force re-calculation
		}
		double [] fx_plus = getFxDerivs(
				probe,
				null, // final double [][] jt, // should be null or initialized with [vector.length][]
				debug_level);
		probe[ipar] -= delta; // now at -0.5*delta from the original value
		if (touches_elevation) {
			elev_sum_weights = null; // force re-calculation
		}
		double [] fx_minus = getFxDerivs(
				probe,
				null, // final double [][] jt, // should be null or initialized with [vector.length][]
				debug_level);
		// Only samples that participate (weight > 0) get a derivative;
		// the rest stay at the 0.0 array default.
		for (int i = 0; i < weights.length; i++) {
			if (weights[i] > 0) {
				jt[ipar][i] = (fx_plus[i] - fx_minus[i]) / delta;
			}
		}
	}
	return jt;
}
}
private
int
[][]
getParamDebugIndices
(
private
int
[][]
getParamDebugIndices
(
...
@@ -3415,6 +3224,7 @@ public class VegetationLMA {
...
@@ -3415,6 +3224,7 @@ public class VegetationLMA {
// scale alpha for the same image
// scale alpha for the same image
int
w
=
i
%
woi_width4
;
int
w
=
i
%
woi_width4
;
int
h
=
i
/
woi_width4
;
int
h
=
i
/
woi_width4
;
if
(
h
<
woi_veg
.
height
)
{
int
nsub
=
w
/
(
woi_veg
.
width
+
gap
);
int
nsub
=
w
/
(
woi_veg
.
width
+
gap
);
if
(
nsub
==
2
)
{
// alpha
if
(
nsub
==
2
)
{
// alpha
dbg_img
[
i
]
=
debug_alpha_scales
[
0
]
+
(
debug_alpha_scales
[
1
]
-
debug_alpha_scales
[
0
])
*
dbg_img
[
i
];
dbg_img
[
i
]
=
debug_alpha_scales
[
0
]
+
(
debug_alpha_scales
[
1
]
-
debug_alpha_scales
[
0
])
*
dbg_img
[
i
];
...
@@ -3423,6 +3233,7 @@ public class VegetationLMA {
...
@@ -3423,6 +3233,7 @@ public class VegetationLMA {
}
}
}
}
}
}
}
if
(
wh
!=
null
)
{
if
(
wh
!=
null
)
{
wh
[
0
]
=
woi_width4
;
wh
[
0
]
=
woi_width4
;
wh
[
1
]
=
indices
.
length
/
woi_width4
;
wh
[
1
]
=
indices
.
length
/
woi_width4
;
...
@@ -4258,13 +4069,14 @@ public class VegetationLMA {
...
@@ -4258,13 +4069,14 @@ public class VegetationLMA {
// scale alpha for the same image
// scale alpha for the same image
int
w
=
i
%
woi_width4
;
int
w
=
i
%
woi_width4
;
int
h
=
i
/
woi_width4
;
int
h
=
i
/
woi_width4
;
if
(
h
<
woi_veg
.
height
)
{
int
nsub
=
w
/
(
woi_veg
.
width
+
gap
);
int
nsub
=
w
/
(
woi_veg
.
width
+
gap
);
if
(
nsub
==
2
)
{
// alpha
if
(
nsub
==
2
)
{
// alpha
d
=
(
d
-
debug_alpha_scales
[
0
])/(
debug_alpha_scales
[
1
]
-
debug_alpha_scales
[
0
]);
d
=
(
d
-
debug_alpha_scales
[
0
])/(
debug_alpha_scales
[
1
]
-
debug_alpha_scales
[
0
]);
}
else
if
(
nsub
==
3
)
{
// elevation
}
else
if
(
nsub
==
3
)
{
// elevation
d
=
(
d
-
debug_alpha_scales
[
2
])/(
debug_alpha_scales
[
3
]
-
debug_alpha_scales
[
2
]);
d
=
(
d
-
debug_alpha_scales
[
2
])/(
debug_alpha_scales
[
3
]
-
debug_alpha_scales
[
2
]);
}
}
}
vector
[
indices
[
i
][
1
]]
=
d
;
vector
[
indices
[
i
][
1
]]
=
d
;
}
}
}
}
...
@@ -4474,6 +4286,75 @@ public class VegetationLMA {
...
@@ -4474,6 +4286,75 @@ public class VegetationLMA {
return
alpha_neibs
;
return
alpha_neibs
;
}
}
/**
 * Expands a neighbors table to include second-order neighbors (neighbors of
 * neighbors), merging the result into {@code second_neibs}.
 * <p>
 * For each parameter {@code nPar} of {@code par_neibs} the method collects the
 * parameter itself, its direct neighbors (entries {@code >= 0} in
 * {@code par_neibs[nPar]}), and each neighbor's neighbors, then merges with any
 * previously stored set in {@code second_neibs[nPar + start_index]} and stores
 * the union back as a sorted int array. Processing is parallelized per
 * parameter; each thread writes only its own {@code second_neibs} rows, so no
 * extra synchronization is used.
 *
 * @param second_neibs full-size output table indexed by absolute parameter
 *        index; rows may be pre-populated (they are merged, never replaced
 *        destructively) — modified in place
 * @param par_neibs    per-parameter direct-neighbor lists (absolute indices;
 *        negative entries are ignored here); null/empty means nothing to do
 * @param start_index  offset of par_neibs[0] within the full parameter vector
 */
public static void addSecondNeighbors(
		final int [][] second_neibs,
		final int [][] par_neibs,
		final int      start_index) {
	if ((par_neibs == null) || (par_neibs.length == 0)) {
		return; // nothing to do
	}
	final Thread [] threads = ImageDtt.newThreadArray();
	final AtomicInteger ai = new AtomicInteger(0);
	for (int ithread = 0; ithread < threads.length; ithread++) {
		threads[ithread] = new Thread() {
			public void run() {
				// reused across iterations of this thread to avoid reallocation
				HashSet<Integer> neibs_set = new HashSet<Integer>();
				for (int nPar = ai.getAndIncrement(); nPar < par_neibs.length; nPar = ai.getAndIncrement()) {
					neibs_set.clear();
					int full_par = nPar + start_index; // absolute parameter index
					neibs_set.add(full_par); // the parameter itself is always a member
					int [] this_neibs = par_neibs[nPar];
					for (int dir = 0; dir < this_neibs.length; dir++) {
						int neib = this_neibs[dir];
						if (neib >= 0) { // negative entries are not parameters — skip
							int neib_rel = neib - start_index; // index back into par_neibs
							if ((neib_rel >= par_neibs.length) || (neib_rel < 0)) {
								// neighbor points outside this parameter group — report and skip
								System.out.println("addSecondNeighbors() BUG: nPar="+nPar+" neib="+neib+
										" neib_rel="+neib_rel+" >= par_neibs.length="+par_neibs.length+" or < 0");
								continue;
							}
							neibs_set.add(neib); // first-order neighbor
							int [] this_neib_neibs = par_neibs[neib_rel];
							for (int dir_neib = 0; dir_neib < this_neib_neibs.length; dir_neib++) {
								int neib_neib = this_neib_neibs[dir_neib];
								if (neib_neib >= 0) {
									int neib_neib_rel = neib_neib - start_index;
									if ((neib_neib_rel >= par_neibs.length) || (neib_neib_rel < 0)) {
										// second-order neighbor out of range — report and skip
										System.out.println("addSecondNeighbors() BUG: nPar="+nPar+" neib="+neib+
												" neib_rel="+neib_rel+" neib_neib="+neib_neib+
												" neib_neib_rel="+neib_neib_rel+
												" >= par_neibs.length="+par_neibs.length+" or < 0");
										continue;
									}
									neibs_set.add(neib_neib); // second-order neighbor
								}
							}
						}
					} // for (int dir = 0; dir < this_neibs.length; dir ++) {
					if (!neibs_set.isEmpty()) { // always true (full_par was added), kept as a guard
						// merge with any previously accumulated set for this parameter
						if (second_neibs[full_par] != null) {
							for (int neib: second_neibs[full_par]) {
								neibs_set.add(neib);
							}
						}
						//https://stackoverflow.com/questions/2451184/how-can-i-convert-a-java-hashsetinteger-to-a-primitive-int-array
						int [] neibs_arr = neibs_set.stream().mapToInt(Integer::intValue).toArray();
						Arrays.sort(neibs_arr); // optional
						second_neibs[full_par] = neibs_arr;
					}
				}
			}
		};
	}
	ImageDtt.startAndJoin(threads);
}
private
void
setupParametersVector
()
{
private
void
setupParametersVector
()
{
// setup terrain parameters (use inner woi)
// setup terrain parameters (use inner woi)
for
(
int
drow
=
0
;
drow
<
woi
.
height
;
drow
++)
{
for
(
int
drow
=
0
;
drow
<
woi
.
height
;
drow
++)
{
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment