Commit 38475568 authored by Andrey Filippov

filtering supertile connections

parent be191b98
......@@ -72,7 +72,29 @@ public class ConnectionCosts {
this.steps = steps;
}
public int [][][] setStarValues( // also initConnectionCosts()
int [] nsTiles,
int debugLevel)
{
return initConnectionCosts(
true, // boolean set_start_planes,
nsTiles,
debugLevel);
}
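// setStarValues() is initConnectionCosts() with set_start_planes = true: besides computing the
// connection cost value/weight pairs it also stores the composite "star" plane for each supertile
// plane via setStarPlane() (see the set_start_planes branches below).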
public int [][][] initConnectionCosts(
int [] nsTiles,
int debugLevel)
{
return initConnectionCosts(
false, // boolean set_start_planes,
nsTiles,
debugLevel);
}
public int [][][] initConnectionCosts(
boolean set_start_planes,
int [] nsTiles,
int debugLevel)
{
......@@ -115,16 +137,19 @@ public class ConnectionCosts {
switch (steps){
case 1:
val_weights = getConnectionsCostSingleStep (
set_start_planes,
null,
debugLevel - 1); // int debugLevel)
break;
case 2:
val_weights = getConnectionsCostDualStep (
set_start_planes,
null,
debugLevel - 1); // int debugLevel)
break;
default:
val_weights = getConnectionsCostSingleStep (
set_start_planes,
null,
debugLevel - 1); // int debugLevel)
}
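// steps selects how far the cost calculation looks from each supertile: 1 uses only directly
// connected supertiles (getConnectionsCostSingleStep()), 2 also includes neighbors' neighbors
// (getConnectionsCostDualStep()).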
......@@ -155,6 +180,9 @@ public class ConnectionCosts {
return neibs_init; // neighbors to clone
}
public double [] getValWeightLast(
int nsTile,
int nl,
......@@ -186,6 +214,7 @@ public class ConnectionCosts {
}
public double [][][] getConnectionsCostSingleStep (
boolean set_start_planes,
int [][][] neibs,
int debugLevel)
{
......@@ -211,6 +240,23 @@ public class ConnectionCosts {
}
}
if (neibs_changed){
TilePlanes.PlaneData star_plane = getStarPlane(
nsTile,
nl,
neibs[isTile][nl],
orthoWeight,
diagonalWeight,
starPwr, // double starPwr, // Divide cost by number of connections to this power
starWeightPwr,
tnSurface,
preferDisparity,
-1); // debugLevel);
if (set_start_planes){
planes[nsTile][nl].setStarPlane(star_plane);
}
vw[isTile][nl] = star_plane.getStarValueWeightDensity();
/*
vw[isTile][nl] = getStarValueWeight(
nsTile,
nl,
......@@ -222,6 +268,7 @@ public class ConnectionCosts {
tnSurface,
preferDisparity,
-1); // debugLevel);
*/
} else {
vw[isTile][nl] = val_weights[isTile][nl];
}
......@@ -238,6 +285,7 @@ public class ConnectionCosts {
}
public double [][][] getConnectionsCostDualStep (
boolean set_start_planes,
int [][][] neibs,
int debugLevel)
{
......@@ -270,17 +318,6 @@ public class ConnectionCosts {
int ineib1 = (tile_map.containsKey(nsTile1))? tile_map.get(nsTile1) : -1;
neibs2[dir] = (ineib1 >=0) ? neibs[tile_map.get(nsTile1)][nl1] : planes[nsTile1][nl1].getNeibBest();
if (!neibs_changed && (ineib1 >= 0)) {
/*
if ((neibs_init[ineib1] == null) || (neibs_init[ineib1][nl1] == null)) {
neibs_changed = true;
} else {
for (int dir1 = 0; dir1 < 8; dir1++) if (neibs[ineib1][nl1][dir1] != neibs_init[ineib1][nl1][dir1]){
neibs_changed = true;
break;
}
}
*/
if ((neibs_init[ineib1] == null) || (neibs[ineib1][nl1] == null)) {
neibs_changed = true;
} else {
......@@ -294,6 +331,23 @@ public class ConnectionCosts {
}
if (neibs_changed){
TilePlanes.PlaneData star_plane = getStarPlane2(
nsTile,
nl,
neibs0,
neibs2,
orthoWeight,
diagonalWeight,
starPwr, // double starPwr, // Divide cost by number of connections to this power
starWeightPwr, //
tnSurface,
preferDisparity,
-1); // debugLevel);
if (set_start_planes){
planes[nsTile][nl].setStarPlane(star_plane);
}
vw[isTile][nl] = star_plane.getStarValueWeightDensity();
/*
vw[isTile][nl] = getStarValueWeight2(
nsTile,
nl,
......@@ -306,6 +360,7 @@ public class ConnectionCosts {
tnSurface,
preferDisparity,
-1); // debugLevel);
*/
} else {
vw[isTile][nl] = val_weights[isTile][nl];
}
......@@ -360,16 +415,19 @@ public class ConnectionCosts {
switch (steps){
case 1:
vw = getConnectionsCostSingleStep (
false,
neibs,
debugLevel-1); // int debugLevel)
break;
case 2:
vw = getConnectionsCostDualStep (
false,
neibs,
debugLevel-1); // int debugLevel)
break;
default:
vw = getConnectionsCostSingleStep (
false,
neibs,
debugLevel-1); // int debugLevel)
}
......@@ -426,6 +484,31 @@ public class ConnectionCosts {
TileSurface.TileNeibs tnSurface,
boolean preferDisparity,
int debugLevel)
{
return getStarPlane(
nsTile,
nl,
neibs,
orthoWeight,
diagonalWeight,
starPwr, // Divide cost by number of connections to this power
starWeightPwr, // Use this power of tile weight when calculating connection cost
tnSurface,
preferDisparity,
debugLevel).getStarValueWeightDensity();
}
TilePlanes.PlaneData getStarPlane(
int nsTile,
int nl,
int [] neibs,
double orthoWeight,
double diagonalWeight,
double starPwr, // Divide cost by number of connections to this power
double starWeightPwr, // Use this power of tile weight when calculating connection cost
TileSurface.TileNeibs tnSurface,
boolean preferDisparity,
int debugLevel)
{
TilePlanes.PlaneData merged_plane = planes[nsTile][nl]; // add weight
double conn_weight = 1.0; // center weight
......@@ -450,9 +533,11 @@ public class ConnectionCosts {
if (starPwr != 0){
value_weight[0] /= (Math.pow((planes[nsTile][nl].getNumNeibBest() + 1.0), starPwr));
}
return value_weight;
merged_plane.setStarValueWeight(value_weight);
return merged_plane;
}
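// Illustrative sketch (hypothetical helper, not part of this change): the starPwr normalization used
// by getStarPlane() above and getStarPlane2() below, factored out on its own. value_weight[0] holds the
// composite eigenvalue-based cost; dividing it by (connections + 1)^starPwr keeps better-connected
// planes from being penalized simply for aggregating more neighbor tiles.
static double [] normalizeStarValueWeight(
    double [] value_weight,    // {value, weight, ...} as used by setStarValueWeight()
    int       num_connections, // number of connected neighbor planes
    double    starPwr)         // same meaning as the starPwr parameter above
{
    double [] vw = value_weight.clone();
    if (starPwr != 0){
        vw[0] /= Math.pow(num_connections + 1.0, starPwr);
    }
    return vw;
}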
/**
* Calculate main eigenvalue of the current plane and all connected ones - used to estimate advantage of connection swap
* This version uses two steps - not only directly connected, but neighbors' neighbors also, multiple paths to the same
......@@ -484,6 +569,34 @@ public class ConnectionCosts {
TileSurface.TileNeibs tnSurface,
boolean preferDisparity,
int debugLevel)
{
return getStarPlane2(
nsTile,
nl,
neibs,
neibs2, // neighbors' neighbors
orthoWeight,
diagonalWeight,
starPwr, // Divide cost by number of connections to this power
starWeightPwr, // Use this power of tile weight when calculating connection cost
tnSurface,
preferDisparity,
debugLevel).getStarValueWeightDensity();
}
TilePlanes.PlaneData getStarPlane2(
int nsTile,
int nl,
int [] neibs,
int [][] neibs2, // neighbors' neighbors
double orthoWeight,
double diagonalWeight,
double starPwr, // Divide cost by number of connections to this power
double starWeightPwr, // Use this power of tile weight when calculating connection cost
TileSurface.TileNeibs tnSurface,
boolean preferDisparity,
int debugLevel)
{
double [] dir_weight = {orthoWeight, diagonalWeight, orthoWeight, diagonalWeight, orthoWeight, diagonalWeight, orthoWeight, diagonalWeight};
HashMap<Point, Double> tile_weights = new HashMap<Point, Double>();
......@@ -529,9 +642,13 @@ public class ConnectionCosts {
if (starPwr != 0){
value_weight[0] /= (Math.pow(tile_weights.size() + 1.0, starPwr));
}
return value_weight;
merged_plane.setStarValueWeight(value_weight);
return merged_plane;
}
/**
* Calculate array of supertile indices that need to have connection cost recalculated when they are updated
* first entries of the result will be the same as in the input array
......
......@@ -2179,10 +2179,22 @@ public class EyesisCorrectionParameters {
public double plMaxWorldSin2 = 0.1; // Maximal sine squared of the world angle between planes to merge. Set to >= 1.0 to disable
public double plWeakWorsening = 1.0; // Relax merge requirements for weaker planes
public double plMaxOverlap = 0.1; // Maximal overlap between the same supertile planes to merge
// Merge same supertile planes if at least one is weak and they do not differ much
public double plWeakWeight = 0.2 ; // Maximal weight of the weak plane to merge
public double plWeakEigen = 0.1; // Maximal eigenvalue of the result of non-weighted merge
public double plWeakWeight2 = 10.0 ; // Maximal weight of the weak plane to merge (second variant)
public double plWeakEigen2 = 0.05; // Maximal eigenvalue of the result of non-weighted merge (second variant)
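// Example (using the defaults above): two planes of the same supertile count as "weak and close" if the
// weaker one has weight <= plWeakWeight = 0.2 and their non-weighted merge has eigenvalue <= plWeakEigen = 0.1;
// the second variant (plWeakWeight2 = 10.0, plWeakEigen2 = 0.05) accepts stronger planes when the merge is even tighter.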
public double plMaxZRatio = 2.0; // Maximal ratio of Z to allow plane merging
public double plMaxDisp = 0.6; // Maximal disparity of one of the planes to apply maximal ratio
public double plCutTail = 1.4; // When merging with neighbors cut the tail that is worse than scaled best
public double plMinTail = 0.015;// Set cutoff value level not less than this
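// Example (using the defaults above): with plCutTail = 1.4 and plMinTail = 0.015, if the best neighbor
// candidate has merged eigenvalue 0.02, the cutoff is max(0.02 * 1.4, 0.015) = 0.028, so only neighbors
// whose merged eigenvalue is <= 0.028 keep their connection.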
// comparing merge quality for plane pairs
public double plCostKrq = 0.8; // cost of merge quality weighted
public double plCostKrqEq = 0.2; // cost of merge quality equal weight
public double plCostKrq = 0.8; // cost of merge quality weighted in disparity space
public double plCostKrqEq = 0.2; // cost of merge quality equal weight in disparity space
public double plCostWrq = 0.8; // cost of merge quality weighted in world space
public double plCostWrqEq = 0.2; // cost of merge quality equal weight in world space
public double plCostSin2 = 10.0; // cost of sin squared between normals
public double plCostRdist2 =1000.0; // cost of squared relative distances
......@@ -2556,8 +2568,20 @@ public class EyesisCorrectionParameters {
properties.setProperty(prefix+"plWeakWorsening", this.plWeakWorsening +"");
properties.setProperty(prefix+"plMaxOverlap", this.plMaxOverlap +"");
properties.setProperty(prefix+"plWeakWeight", this.plWeakWeight +"");
properties.setProperty(prefix+"plWeakEigen", this.plWeakEigen +"");
properties.setProperty(prefix+"plWeakWeight2", this.plWeakWeight2 +"");
properties.setProperty(prefix+"plWeakEigen2", this.plWeakEigen2 +"");
properties.setProperty(prefix+"plMaxZRatio", this.plMaxZRatio +"");
properties.setProperty(prefix+"plMaxDisp", this.plMaxDisp +"");
properties.setProperty(prefix+"plCutTail", this.plCutTail +"");
properties.setProperty(prefix+"plMinTail", this.plMinTail +"");
properties.setProperty(prefix+"plCostKrq", this.plCostKrq +"");
properties.setProperty(prefix+"plCostKrqEq", this.plCostKrqEq +"");
properties.setProperty(prefix+"plCostWrq", this.plCostWrq +"");
properties.setProperty(prefix+"plCostWrqEq", this.plCostWrqEq +"");
properties.setProperty(prefix+"plCostSin2", this.plCostSin2 +"");
properties.setProperty(prefix+"plCostRdist2", this.plCostRdist2 +"");
......@@ -2909,8 +2933,20 @@ public class EyesisCorrectionParameters {
if (properties.getProperty(prefix+"plWeakWorsening")!=null) this.plWeakWorsening=Double.parseDouble(properties.getProperty(prefix+"plWeakWorsening"));
if (properties.getProperty(prefix+"plMaxOverlap")!=null) this.plMaxOverlap=Double.parseDouble(properties.getProperty(prefix+"plMaxOverlap"));
if (properties.getProperty(prefix+"plWeakWeight")!=null) this.plWeakWeight=Double.parseDouble(properties.getProperty(prefix+"plWeakWeight"));
if (properties.getProperty(prefix+"plWeakEigen")!=null) this.plWeakEigen=Double.parseDouble(properties.getProperty(prefix+"plWeakEigen"));
if (properties.getProperty(prefix+"plWeakWeight2")!=null) this.plWeakWeight2=Double.parseDouble(properties.getProperty(prefix+"plWeakWeight2"));
if (properties.getProperty(prefix+"plWeakEigen2")!=null) this.plWeakEigen2=Double.parseDouble(properties.getProperty(prefix+"plWeakEigen2"));
if (properties.getProperty(prefix+"plMaxZRatio")!=null) this.plMaxZRatio=Double.parseDouble(properties.getProperty(prefix+"plMaxZRatio"));
if (properties.getProperty(prefix+"plMaxDisp")!=null) this.plMaxDisp=Double.parseDouble(properties.getProperty(prefix+"plMaxDisp"));
if (properties.getProperty(prefix+"plCutTail")!=null) this.plCutTail=Double.parseDouble(properties.getProperty(prefix+"plCutTail"));
if (properties.getProperty(prefix+"plMinTail")!=null) this.plMinTail=Double.parseDouble(properties.getProperty(prefix+"plMinTail"));
if (properties.getProperty(prefix+"plCostKrq")!=null) this.plCostKrq=Double.parseDouble(properties.getProperty(prefix+"plCostKrq"));
if (properties.getProperty(prefix+"plCostKrqEq")!=null) this.plCostKrqEq=Double.parseDouble(properties.getProperty(prefix+"plCostKrqEq"));
if (properties.getProperty(prefix+"plCostWrq")!=null) this.plCostWrq=Double.parseDouble(properties.getProperty(prefix+"plCostWrq"));
if (properties.getProperty(prefix+"plCostWrqEq")!=null) this.plCostWrqEq=Double.parseDouble(properties.getProperty(prefix+"plCostWrqEq"));
if (properties.getProperty(prefix+"plCostSin2")!=null) this.plCostSin2=Double.parseDouble(properties.getProperty(prefix+"plCostSin2"));
if (properties.getProperty(prefix+"plCostRdist2")!=null) this.plCostRdist2=Double.parseDouble(properties.getProperty(prefix+"plCostRdist2"));
......@@ -3290,9 +3326,23 @@ public class EyesisCorrectionParameters {
gd.addNumericField("Relax merge requirements for weaker planes", this.plWeakWorsening, 6);
gd.addNumericField("Maximal overlap between the same supertile planes to merge", this.plMaxOverlap, 6);
gd.addMessage ("--- Merge same supetile planes if at least one is weak and they do not differ much ---");
gd.addNumericField("Maximal weight of the weak plane to merge (first variant)", this.plWeakWeight, 6);
gd.addNumericField("Maximal eigenvalue of the result of non-weighted merge (first variant)", this.plWeakEigen, 6);
gd.addNumericField("Maximal weight of the weak plane to merge (second variant)", this.plWeakWeight2, 6);
gd.addNumericField("Maximal eigenvalue of the result of non-weighted merge (second variant)", this.plWeakEigen2, 6);
gd.addMessage ("--- ---");
gd.addNumericField("Maximal ratio of Z to allow plane merging", this.plMaxZRatio, 6);
gd.addNumericField("Maximal disparity of one of the planes to apply maximal ratio", this.plMaxDisp, 6);
gd.addNumericField("When merging with neighbors cut the tail that is worse than scaled best", this.plCutTail, 6);
gd.addNumericField("Set cutoff value livel not less than this", this.plMinTail, 6);
gd.addMessage ("--- Planes merge costs ---");
gd.addNumericField("Cost of merge quality weighted", this.plCostKrq, 6);
gd.addNumericField("Cost of merge quality equal weight", this.plCostKrqEq, 6);
gd.addNumericField("Cost of merge quality weighted in disparity space", this.plCostKrq, 6);
gd.addNumericField("Cost of merge quality equal weight in disparity space", this.plCostKrqEq, 6);
gd.addNumericField("Cost of merge quality weighted in world space", this.plCostWrq, 6);
gd.addNumericField("Cost of merge quality equal weight in world space", this.plCostWrqEq, 6);
gd.addNumericField("Cost of sin squared between normals", this.plCostSin2, 6);
gd.addNumericField("Cost of squared relative plane-to-other-center distances", this.plCostRdist2, 6);
......@@ -3657,8 +3707,20 @@ public class EyesisCorrectionParameters {
this.plWeakWorsening= gd.getNextNumber();
this.plMaxOverlap= gd.getNextNumber();
this.plWeakWeight= gd.getNextNumber();
this.plWeakEigen= gd.getNextNumber();
this.plWeakWeight2= gd.getNextNumber();
this.plWeakEigen2= gd.getNextNumber();
this.plMaxZRatio= gd.getNextNumber();
this.plMaxDisp= gd.getNextNumber();
this.plCutTail= gd.getNextNumber();
this.plMinTail= gd.getNextNumber();
this.plCostKrq= gd.getNextNumber();
this.plCostKrqEq= gd.getNextNumber();
this.plCostWrq= gd.getNextNumber();
this.plCostWrqEq= gd.getNextNumber();
this.plCostSin2= gd.getNextNumber();
this.plCostRdist2= gd.getNextNumber();
......
......@@ -24,10 +24,10 @@
import java.awt.Point;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.concurrent.atomic.AtomicInteger;
public class LinkPlanes {
SuperTiles st;
public boolean plPreferDisparity; // = false;// Always start with disparity-most axis (false - lowest eigenvalue)
......@@ -46,12 +46,25 @@ public class LinkPlanes {
public double plWeakWorsening; // = 1.0; // Relax merge requirements for weaker planes
public double plMaxOverlap; // = 0.1; // Maximal overlap between the same supertile planes to merge
// Merge same supertile planes if at least one is weak and they do not differ much
public double plWeakWeight; // = 0.2; // Maximal weight of the weak plane to merge
public double plWeakEigen; // = 0.1; // Maximal eigenvalue of the result of non-weighted merge
public double plWeakWeight2; // = 10.0 ; // Maximal weight of the weak plane to merge (second variant)
public double plWeakEigen2; // = 0.05; // Maximal eigenvalue of the result of non-weighted merge (second variant)
// comparing merge quality for plane pairs
public double plCostKrq; // = 0.8; // Cost of merge quality weighted
public double plCostKrqEq; // = 0.2; // Cost of merge quality equal weight
public double plCostSin2; // = 100.0; // Cost of sin squared between normals
public double plCostRdist2; // =1000.0; // Cost of squared relative distances
public double plCostKrq; // = 0.8; // cost of merge quality weighted in disparity space
public double plCostKrqEq; // = 0.2; // cost of merge quality equal weight in disparity space
public double plCostWrq; // = 0.8; // cost of merge quality weighted in world space
public double plCostWrqEq; // = 0.2; // cost of merge quality equal weight in world space
public double plCostSin2; // = 10.0; // cost of sin squared between normals
public double plCostRdist2; // =1000.0; // cost of squared relative distances
public double plMaxZRatio; // = 2.5; // Maximal ratio of Z to allow plane merging
public double plMaxDisp; // = 0.5; // Maximal disparity of one of the planes to apply maximal ratio
public double plCutTail; // = 1.4; // When merging with neighbors cut the tail that is worse than scaled best
public double plMinTail; // = 0.015;// Set cutoff value level not less than this
public int dbg_tileX;
public int dbg_tileY;
......@@ -76,37 +89,108 @@ public class LinkPlanes {
plMinStrength = clt_parameters.plMinStrength;
plMaxOverlap = clt_parameters.plMaxOverlap;
plWeakWeight = clt_parameters.plWeakWeight;
plWeakEigen = clt_parameters.plWeakEigen;
plWeakWeight2 = clt_parameters.plWeakWeight2;
plWeakEigen2 = clt_parameters.plWeakEigen2;
plCostKrq = clt_parameters.plCostKrq;
plCostKrqEq = clt_parameters.plCostKrqEq;
plCostWrq = clt_parameters.plCostWrq;
plCostWrqEq = clt_parameters.plCostWrqEq;
plCostSin2 = clt_parameters.plCostSin2;
plCostRdist2 = clt_parameters.plCostRdist2;
plMaxZRatio = clt_parameters.plMaxZRatio;
plMaxDisp = clt_parameters.plMaxDisp;
plCutTail = clt_parameters.plCutTail;
plMinTail = clt_parameters.plMinTail;
dbg_tileX = clt_parameters.tileX;
dbg_tileY = clt_parameters.tileY;
this.st = st;
}
public boolean areWeakSimilar(
TilePlanes.PlaneData plane1, // should belong to the same supertile (or be converted for one)
TilePlanes.PlaneData plane2,
double merged_ev_eq, // if NaN will calculate assuming the same supertile
String prefix,
int debugLevel)
{
return planesFit(
plane1, // should belong to the same supertile (or be converted for one)
plane2,
true, // use for same supertile merge
true, // boolean check_is_weak_only, // only verify if the two planes are close and one is weak
Double.NaN, // if NaN will calculate assuming the same supertile
merged_ev_eq, // if NaN will calculate assuming the same supertile
Double.NaN, // if NaN will calculate assuming the same supertile - for world
Double.NaN, // if NaN will calculate assuming the same supertile - for world
prefix,
debugLevel);
}
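// areWeakSimilar() is a thin wrapper over planesFit() with check_is_weak_only = true: it only tests
// whether the two planes of the same supertile are close (small non-weighted merged eigenvalue) and at
// least one of them is weak, skipping the remaining fit criteria.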
public boolean planesFit(
TilePlanes.PlaneData plane1, // should belong to the same supertile (or be converted for one)
TilePlanes.PlaneData plane2,
boolean merge_weak, // use for same supertile merge
double merged_ev, // if NaN will calculate assuming the same supertile
double merged_ev_eq, // if NaN will calculate assuming the same supertile
double merged_wev, // if NaN will calculate assuming the same supertile - for world
double merged_wev_eq, // if NaN will calculate assuming the same supertile - for world
String prefix,
int debugLevel)
{
return planesFit(
plane1, // should belong to the same supertile (or be converted for one)
plane2,
merge_weak, // use for same supertile merge
false, // boolean check_is_weak_only, // only verify if the two planes are close and one is weak
merged_ev, // if NaN will calculate assuming the same supertile
merged_ev_eq, // if NaN will calculate assuming the same supertile
merged_wev, // if NaN will calculate assuming the same supertile - for world
merged_wev_eq, // if NaN will calculate assuming the same supertile - for world
prefix,
debugLevel);
}
public boolean planesFit(
TilePlanes.PlaneData plane1, // should belong to the same supertile (or be converted for one)
TilePlanes.PlaneData plane2,
boolean merge_weak, // use for same supertile merge
boolean check_is_weak_only, // only verify if the two planes are close and one is weak
double merged_ev, // if NaN will calculate assuming the same supertile
double merged_ev_eq, // if NaN will calculate assuming the same supertile
double merged_wev, // if NaN will calculate assuming the same supertile - for world
double merged_wev_eq, // if NaN will calculate assuming the same supertile - for world
String prefix,
int debugLevel)
{
merge_weak |= check_is_weak_only;
if ((plane1 == null) || (plane2 == null)) return false;
boolean dbg = debugLevel > 1;
if (debugLevel > 1){
System.out.println("planesFit() debug:");
}
TilePlanes.PlaneData merged_pd = null;
TilePlanes.PlaneData merged_pd_eq = null;
if (plane1.getWeight() < plMinStrength) {
double disp1 = plane1.getZxy()[0];
double disp2 = plane2.getZxy()[0];
if ((disp1 <= 0) || (disp2 <= 0) || (((disp1 <= plMaxDisp) || (disp2 <= plMaxDisp)) && ((disp1/disp2 > plMaxZRatio) || (disp2/disp1 > plMaxZRatio)))){
if (debugLevel > -1) System.out.println(prefix+" planes have too high disparity ratio ("+disp1+":"+disp2+" > plMaxZRatio="+plMaxZRatio+
", at least one of them < plMaxDisp="+plMaxDisp);
return false;
} else {
if (debugLevel > 0) System.out.println(prefix+" disparity ratio ("+disp1+":"+disp2+" is OK, <= plMaxZRatio="+plMaxZRatio);
}
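// Example (with plMaxZRatio = 2.0 and plMaxDisp = 0.6, the defaults in EyesisCorrectionParameters):
// planes at disparities 0.3 and 0.9 are rejected (one is <= plMaxDisp and the ratio 3.0 exceeds
// plMaxZRatio), while planes at 1.0 and 2.5 pass because neither is below plMaxDisp, so the ratio
// test is not applied.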
if (!merge_weak && (plane1.getWeight() < plMinStrength)) {
if (debugLevel > 1) System.out.println(prefix+" plane1 is too weak ("+plane1.getWeight()+" < plMinStrength="+plMinStrength+")");
return false;
}
if (plane2.getWeight() < plMinStrength) {
if (!merge_weak && (plane2.getWeight() < plMinStrength)) {
if (debugLevel > 1) System.out.println(prefix+" plane2 is too weak ("+plane2.getWeight()+" < plMinStrength="+plMinStrength+")");
return false;
}
......@@ -126,7 +210,7 @@ public class LinkPlanes {
return false;
}
if (Double.isNaN(merged_ev)) {
if (!check_is_weak_only && (Double.isNaN(merged_ev) || Double.isNaN(merged_wev))) {
merged_pd = plane1.mergePlaneToThis(
plane2, // PlaneData otherPd,
1.0, // double scale_other,
......@@ -136,8 +220,9 @@ public class LinkPlanes {
plPreferDisparity,
debugLevel - 2); // int debugLevel)
merged_ev = merged_pd.getValue();
merged_wev = merged_pd.getWValue();
}
if (Double.isNaN(merged_ev_eq)) {
if (Double.isNaN(merged_ev_eq) || (!check_is_weak_only && Double.isNaN(merged_wev_eq))) {
merged_pd_eq = plane1.mergePlaneToThis(
plane2, // PlaneData otherPd,
1.0, // double scale_other,
......@@ -147,9 +232,37 @@ public class LinkPlanes {
plPreferDisparity,
debugLevel - 2); // int debugLevel)
merged_ev_eq = merged_pd_eq.getValue();
merged_wev_eq = merged_pd_eq.getWValue();
}
double w1 = plane1.getWeight();
double w2 = plane2.getWeight();
double weakest = (w1 > w2) ? w2 : w1;
double this_rq_eq = mergeRQuality(
plane1.getValue(), // double L1,
plane2.getValue() , // double L2,
merged_ev_eq, // double L,
1.0, // double w1,
1.0, // double w2)
plEigenFloor);// double eigen_floor)
double this_rq_eq_norm = this_rq_eq;
if ((w1 + w2) < plWeakWorsening) this_rq_eq_norm *= (w1 + w2) / plWeakWorsening; // forgive more for weak planes
boolean weak_and_close = false;
if (merge_weak && (weakest <= plWeakWeight) && (merged_ev_eq <= plWeakEigen )){
weak_and_close = true;
if (dbg) System.out.println(prefix+": same supertile planes are weak and close: the weakest ("+weakest+") is below "+plWeakWeight+
" and merged non-weighted eigenvalue ("+merged_ev_eq+") is below "+plWeakEigen);
}
if (merge_weak && (weakest <= plWeakWeight2) && (merged_ev_eq <= plWeakEigen2 )){
weak_and_close = true;
if (dbg) System.out.println(prefix+": same supertile planes are weak and close (variant 2): the weakest ("+weakest+") is below "+plWeakWeight2+
" and merged non-weighted eigenvalue ("+merged_ev_eq+") is below "+plWeakEigen2);
}
// if (check_is_weak_only) return weak_and_close;
double this_rq = mergeRQuality(
plane1.getValue(), // double L1,
plane2.getValue() , // double L2,
......@@ -159,53 +272,123 @@ public class LinkPlanes {
plEigenFloor);// double eigen_floor)
double this_rq_norm = this_rq;
if ((w1 + w2) < plWeakWorsening) this_rq_norm *= (w1 + w2) / plWeakWorsening; // forgive more for weak planes
double this_rq_eq = mergeRQuality(
plane1.getValue(), // double L1,
plane2.getValue() , // double L2,
merged_ev_eq, // double L,
double this_wrq = mergeRQuality(
plane1.getWValue(), // double L1,
plane2.getWValue(), // double L2,
merged_wev, // double L,
w1, // double w1,
w2, // double w2)
0.0);// double eigen_floor)
double this_wrq_norm = this_wrq;
if ((w1 + w2) < plWeakWorsening) this_wrq_norm *= (w1 + w2) / plWeakWorsening; // forgive more for weak planes
double this_wrq_eq = mergeRQuality(
plane1.getWValue(), // double L1,
plane2.getWValue(), // double L2,
merged_wev_eq, // double L,
1.0, // double w1,
1.0, // double w2)
plEigenFloor);// double eigen_floor)
0.0);// double eigen_floor)
this_wrq_eq /= (w1 + w2); // for comparison reduce this value for stronger planes
double this_wrq_eq_norm = this_wrq_eq;
if ((w1 + w2) < plWeakWorsening) this_wrq_eq_norm *= (w1 + w2) / plWeakWorsening; // forgive more for weak planes
double this_rq_eq_norm = this_rq_eq;
if ((w1 + w2) < plWeakWorsening) this_rq_eq_norm *= (w1 + w2) / plWeakWorsening; // forgive more for weak planes
boolean OK_to_merge = false;
boolean notOK_to_merge = false;
if ((this_rq_norm <= plWorstWorsening) ||
((merged_ev <= plOKMergeEigen) && (this_rq_norm <= plWorstWorsening2)) || // use higher threshold
(this_rq_eq_norm <= plWorstEq) ||
((merged_ev_eq <= plOKMergeEigen) && (this_rq_eq_norm <= plWorstEq2)) // use higher threshold
) {
if ((plMaxWorldSin2 >= 1.0) || (plane1.getWorldSin2(plane2) <= plMaxWorldSin2)) {
if (debugLevel > 0){
System.out.print(prefix+": planes FIT");
if (this_rq_norm <= plWorstWorsening)
System.out.print(" (this_rq_norm="+this_rq_norm+" <= plWorstWorsening="+plWorstWorsening+")");
if ((merged_ev <= plOKMergeEigen) && (this_rq_norm <= plWorstWorsening2))
System.out.print(" merged_ev="+merged_ev+" <= plOKMergeEigen="+plOKMergeEigen+") && (this_rq_norm="+this_rq_norm+
" <= plWorstWorsening2="+plWorstWorsening2+")");
if (this_rq_eq_norm <= plWorstEq)
System.out.print(" this_rq_eq_norm="+this_rq_eq_norm+" <= plWorstEq="+plWorstEq);
if ((merged_ev_eq <= plOKMergeEigen) && (this_rq_eq_norm <= plWorstEq2))
System.out.print(" ((merged_ev_eq="+merged_ev_eq+" <= plOKMergeEigen) && (this_rq_eq_norm="+
if (this_rq_norm <= plWorstWorsening){
OK_to_merge = true;
if (dbg) System.out.println(prefix+": planes may fit (this_rq_norm="+this_rq_norm+" <= plWorstWorsening="+plWorstWorsening+")");
}
if ((!OK_to_merge || dbg) && (merged_ev <= plOKMergeEigen) && (this_rq_norm <= plWorstWorsening2)){
OK_to_merge = true;
if (dbg) System.out.println(prefix+": planes may fit (merged_ev="+merged_ev+
" <= plOKMergeEigen="+plOKMergeEigen+") && (this_rq_norm="+this_rq_norm+
" <= plWorstWorsening2="+plWorstWorsening2+")");
}
if ((!OK_to_merge || dbg) && (this_rq_eq_norm <= plWorstEq)){
OK_to_merge = true;
if (dbg) System.out.println(prefix+": planes may fit (this_rq_eq_norm="+this_rq_eq_norm+" <= plWorstEq="+plWorstEq+")");
}
if ((!OK_to_merge || dbg) && (merged_ev_eq <= plOKMergeEigen) && (this_rq_eq_norm <= plWorstEq2)){
OK_to_merge = true;
if (dbg) System.out.println(prefix+": planes may fit (merged_ev_eq="+merged_ev_eq+" <= plOKMergeEigen) && (this_rq_eq_norm="+
this_rq_eq_norm+" <= plWorstEq2="+plWorstEq2+")");
System.out.println();
if (debugLevel > 1){
System.out.println(prefix+" (fit) this_rq="+this_rq+
", this_rq_eq="+this_rq_eq+
" w1="+w1+" w2="+w2+
" L1="+plane1.getValue()+" L2="+plane2.getValue()+
" L="+merged_ev+" L_eq="+merged_ev_eq);
System.out.println(prefix+" (fit) world sin2 ="+
plane1.getWorldSin2(plane2));
System.out.println(prefix+ " (fit)" +
" world rdist this="+ Math.sqrt(plane1.getWorldPlaneRDist2(plane2))+
", world rdist other="+Math.sqrt(plane2.getWorldPlaneRDist2(plane1))+
", world rdist sum="+Math.sqrt(plane1.getWorldPlaneRDist2(plane2)+
plane2.getWorldPlaneRDist2(plane1)));
}
}
if ((!OK_to_merge || dbg) && (this_wrq_norm <= plWorstWorsening)){
OK_to_merge = true;
if (dbg) System.out.println(prefix+": planes may fit (this_wrq_norm="+this_wrq_norm+" <= plWorstWorsening="+plWorstWorsening+")");
}
if ((!OK_to_merge || dbg) && (this_wrq_eq_norm <= plWorstEq)){
OK_to_merge = true;
if (dbg) System.out.println(prefix+": planes may fit (this_wrq_eq_norm="+this_wrq_eq_norm+" <= plWorstEq="+plWorstEq+")");
}
// do not apply sin2 to weak planes
if ((plMaxWorldSin2 < 1.0) && (weakest > plWeakWeight) && (plane1.getWorldSin2(plane2) > plMaxWorldSin2)) {
notOK_to_merge = true;
if (dbg) System.out.println(prefix+": planes do not fit as sin2 > "+plMaxWorldSin2+" weakest="+weakest);
}
if (weak_and_close){
OK_to_merge = true;
notOK_to_merge = false; // weak can have large angles
if (dbg) System.out.println(prefix+": same supertile planes fit as the weakest ("+weakest+") is below "+plWeakWeight+
" and merged non-weighted eigenvalue ("+merged_ev_eq+") is below "+plWeakEigen);
}
if (check_is_weak_only) {
if (debugLevel>1){
System.out.println(prefix+" weak_and_close = " + weak_and_close+
" this_rq="+this_rq+
" this_rq_eq="+this_rq_eq+
" this_wrq=" + (this_wrq) +
" this_wrq_eq=" + (this_wrq_eq) +
" w1="+w1+" w2="+w2+
" L1="+plane1.getValue()+" L2="+plane2.getValue()+
" L="+merged_ev+" L_eq="+merged_ev_eq+
" L1W="+plane1.getWValue()+" L2W="+plane2.getWValue()+" LW="+merged_wev+
" L_eqW="+merged_wev_eq);
System.out.println(prefix+" (fit) world sin2 ="+
plane1.getWorldSin2(plane2));
System.out.println(prefix+ " (fit)" +
" world rdist this="+ Math.sqrt(plane1.getWorldPlaneRDist2(plane2))+
", world rdist other="+Math.sqrt(plane2.getWorldPlaneRDist2(plane1))+
", world rdist sum="+Math.sqrt(plane1.getWorldPlaneRDist2(plane2)+
plane2.getWorldPlaneRDist2(plane1)));
}
return weak_and_close;
}
if (OK_to_merge && !notOK_to_merge) {
if (debugLevel > 0){
System.out.println(prefix+": planes FIT");
if (debugLevel > 1){
System.out.println(prefix+" (fit) this_rq="+this_rq+
" this_rq_eq="+this_rq_eq+
" this_wrq=" + (this_wrq) +
" this_wrq_eq=" + (this_wrq_eq) +
" w1="+w1+" w2="+w2+
" L1="+plane1.getValue()+" L2="+plane2.getValue()+
" L="+merged_ev+" L_eq="+merged_ev_eq+
" L1W="+plane1.getWValue()+" L2W="+plane2.getWValue()+" LW="+merged_wev+
" L_eqW="+merged_wev_eq);
System.out.println(prefix+" (fit) world sin2 ="+
plane1.getWorldSin2(plane2));
System.out.println(prefix+ " (fit)" +
" world rdist this="+ Math.sqrt(plane1.getWorldPlaneRDist2(plane2))+
", world rdist other="+Math.sqrt(plane2.getWorldPlaneRDist2(plane1))+
", world rdist sum="+Math.sqrt(plane1.getWorldPlaneRDist2(plane2)+
plane2.getWorldPlaneRDist2(plane1)));
}
return true;
}
return true;
}
if (debugLevel > 0) {
System.out.println(prefix+": planes DO NOT FIT");
......@@ -236,13 +419,15 @@ public class LinkPlanes {
TilePlanes.PlaneData plane2,
double merged_ev, // if NaN will calculate assuming the same supertile
double merged_ev_eq, // if NaN will calculate assuming the same supertile
double merged_wev, // if NaN will calculate assuming the same supertile - for world
double merged_wev_eq, // if NaN will calculate assuming the same supertile - for world
String prefix,
int debugLevel)
{
if ((plane1 == null) || (plane2 == null)) return null;
TilePlanes.PlaneData merged_pd = null;
TilePlanes.PlaneData merged_pd_eq = null;
if (Double.isNaN(merged_ev)) {
if (Double.isNaN(merged_ev) || Double.isNaN(merged_wev)) {
merged_pd = plane1.mergePlaneToThis(
plane2, // PlaneData otherPd,
1.0, // double scale_other,
......@@ -251,9 +436,10 @@ public class LinkPlanes {
true, // boolean sum_weights,
plPreferDisparity,
debugLevel - 1); // int debugLevel)
merged_ev = merged_pd.getValue();
merged_ev = merged_pd.getValue();
merged_wev = merged_pd.getWValue();
}
if (Double.isNaN(merged_ev_eq)) {
if (Double.isNaN(merged_ev_eq) || Double.isNaN(merged_wev_eq)) {
merged_pd_eq = plane1.mergePlaneToThis(
plane2, // PlaneData otherPd,
1.0, // double scale_other,
......@@ -262,7 +448,8 @@ public class LinkPlanes {
true, // boolean sum_weights,
plPreferDisparity,
debugLevel - 1); // int debugLevel)
merged_ev_eq = merged_pd_eq.getValue();
merged_ev_eq = merged_pd_eq.getValue();
merged_wev_eq = merged_pd_eq.getWValue();
}
double w1 = plane1.getWeight();
double w2 = plane2.getWeight();
......@@ -288,15 +475,34 @@ public class LinkPlanes {
1.0, // double w2)
plEigenFloor);// double eigen_floor)
this_rq /= (w1 + w2); // for comparison reduce this value for stronger planes
double this_wrq = mergeRQuality(
plane1.getWValue(), // double L1,
plane2.getWValue(), // double L2,
merged_wev, // double L,
w1, // double w1,
w2, // double w2)
0.0);// double eigen_floor)
this_wrq /= (w1 + w2); // for comparison reduce this value for stronger planes
double this_wrq_eq = mergeRQuality(
plane1.getWValue(), // double L1,
plane2.getWValue(), // double L2,
merged_wev_eq, // double L,
1.0, // double w1,
1.0, // double w2)
0.0);// double eigen_floor)
this_wrq_eq /= (w1 + w2); // for comparison reduce this value for stronger planes
double sin2 = plane1.getWorldSin2(plane2);
double rdist2 = plane1.getWorldPlaneRDist2(plane2) + plane2.getWorldPlaneRDist2(plane1);
double [] costs = {
this_rq * plCostKrq,
this_rq_eq * plCostKrqEq,
sin2 * plCostSin2,
rdist2 * plCostRdist2};
double cost = costs[0]+costs[1]+costs[2]+costs[3];
this_rq * plCostKrq,
this_rq_eq * plCostKrqEq,
this_wrq * plCostWrq,
this_wrq_eq * plCostWrqEq,
sin2 * plCostSin2,
rdist2 * plCostRdist2};
double cost = costs[0]+costs[1]+costs[2]+costs[3]+costs[4]+costs[5];
double [] qualities = {
this_rq,
this_rq_eq,
......@@ -304,7 +510,9 @@ public class LinkPlanes {
costs[0]/cost,
costs[1]/cost,
costs[2]/cost,
costs[3]/cost};
costs[3]/cost,
costs[4]/cost,
costs[5]/cost};
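// Worked example (assumed raw values, default plCost* weights): with this_rq = 0.05, this_rq_eq = 0.10,
// this_wrq = 0.04, this_wrq_eq = 0.08, sin2 = 0.001 and rdist2 = 0.00002 the six cost terms are
// 0.040, 0.020, 0.032, 0.016, 0.010 and 0.020, so cost = 0.138; the last six entries of qualities[]
// report each term's share of the total (here the first term contributes 0.040/0.138, about 29%).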
if (debugLevel > 0){
System.out.println(prefix+" cost="+cost);
......@@ -313,16 +521,23 @@ public class LinkPlanes {
((int)(100*qualities[3]))+"%, "+
((int)(100*qualities[4]))+"%, "+
((int)(100*qualities[5]))+"%, "+
((int)(100*qualities[6]))+"%");
((int)(100*qualities[6]))+"%, "+
((int)(100*qualities[7]))+"%, "+
((int)(100*qualities[8]))+"%");
System.out.println(prefix+
" this_rq="+this_rq+
" this_rq=" + (this_rq) +
" this_rq=" + this_rq+
" this_wrq=" + (this_wrq) +
" this_wrq_eq=" + (this_wrq_eq) +
" this_wrq_raw=" + (this_wrq * (w1+w2)) +
" this_wrq_eq_raw=" + (this_wrq_eq * (w1+w2)) +
" this_rq_raw="+(this_rq * (w1+w2)) +
" this_rq_eq="+(this_rq_eq) +
" this_rq_nofloor="+(this_rq_nofloor) +
" w1="+w1+" w2="+w2+
" L1="+plane1.getValue()+" L2="+plane2.getValue()+" L="+merged_ev+
" L_eq="+merged_ev_eq);
" L_eq="+merged_ev_eq+
" L1W="+plane1.getWValue()+" L2W="+plane2.getWValue()+" LW="+merged_wev+
" L_eqW="+merged_wev_eq);
System.out.println(prefix + ", world sin2 =" + plane1.getWorldSin2(plane2));
System.out.println(prefix+
", world rdist this="+ Math.sqrt(plane1.getWorldPlaneRDist2(plane2))+
......@@ -378,7 +593,8 @@ public class LinkPlanes {
int stx = stx0 + dirsYX[dir][1];
int sty = sty0 + dirsYX[dir][0];
// if ((sty < stilesY) && (sty > 0) && (stx < 0)) {
if ((stx < stilesX) && (sty < stilesY) && (sty > 0)) {
if ((stx < stilesX) && (sty < stilesY) && (sty >= 0)) {
// if ((stx < stilesX) && (sty < stilesY) && (sty > 0)) {
int nsTile = sty * stilesX + stx; // from where to get
if (nsTile >= planes.length){
System.out.println("BUG!!!!");
......@@ -404,7 +620,8 @@ public class LinkPlanes {
if (merged_pd !=null) { // now always, but may add later
/// merged_pd.scaleWeight(0.5);
this_plane.setNeibMatch(dir, np, merged_pd.getValue()); // smallest eigenValue
this_plane.setNeibMatch (dir, np, merged_pd.getValue()); // smallest eigenValue
this_plane.setNeibWMatch(dir, np, merged_pd.getWValue()); // smallest eigenvalue in world coordinates
}
merged_pd = this_plane.mergePlaneToThis(
......@@ -418,7 +635,8 @@ public class LinkPlanes {
if (merged_pd !=null) { // now always, but may add later
/// merged_pd.scaleWeight(0.5);
this_plane.setNeibMatchEq(dir, np, merged_pd.getValue()); // smallest eigenValue
this_plane.setNeibMatchEq (dir, np, merged_pd.getValue()); // smallest eigenValue
this_plane.setNeibWMatchEq(dir, np, merged_pd.getWValue()); // smallest eigenvalue in world coordinates
}
}
}
......@@ -459,7 +677,8 @@ public class LinkPlanes {
for (int dir = 4; dir < 8; dir++){ // other half - copy from opposite
int stx = stx0 + dirsYX[dir][1];
int sty = sty0 + dirsYX[dir][0];
if ((sty < stilesY) && (sty > 0) && (stx > 0)) {
// if ((sty < stilesY) && (sty > 0) && (stx > 0)) {
if ((sty < stilesY) && (sty >= 0) && (stx >= 0)) {
int nsTile = sty * stilesX + stx; // from where to get
TilePlanes.PlaneData [] other_planes = planes[nsTile];
if (other_planes !=null) {
......@@ -474,6 +693,15 @@ public class LinkPlanes {
if (nm != null) {
this_plane.setNeibMatchEq(dir,np, nm[np0]); //
}
nm = other_planes[np].getMergedWValue(dir-4);
if (nm != null) {
this_plane.setNeibWMatch(dir,np, nm[np0]); //
}
nm = other_planes[np].getMergedWValueEq(dir-4);
if (nm != null) {
this_plane.setNeibWMatchEq(dir,np, nm[np0]); //
}
}
}
......@@ -502,9 +730,10 @@ public class LinkPlanes {
public void filterNeighborPlanes(
final TilePlanes.PlaneData [][] planes,
final int debugLevel,
final int dbg_X,
final int dbg_Y)
final boolean merge_low_eigen,
final int debugLevel,
final int dbg_X,
final int dbg_Y)
{
final int tilesX = st.tileProcessor.getTilesX();
final int tilesY = st.tileProcessor.getTilesY();
......@@ -544,8 +773,11 @@ public class LinkPlanes {
for (int np0 = np0_min; np0 < planes[nsTile0].length; np0++){
if ((planes[nsTile0][np0] != null) && (planes[nsTile0][np0].getMergedValue(dir) != null)){
double [] merge_ev = planes[nsTile0][np0].getMergedValue(dir);
double [] merge_ev_eq = planes[nsTile0][np0].getMergedValueEq(dir);
double [] merge_ev = planes[nsTile0][np0].getMergedValue(dir);
double [] merge_ev_eq = planes[nsTile0][np0].getMergedValueEq(dir);
double [] merge_wev = planes[nsTile0][np0].getMergedWValue(dir);
double [] merge_wev_eq = planes[nsTile0][np0].getMergedWValueEq(dir);
if ( (merge_ev != null) &&
(merge_ev_eq != null)) {
int np_min = SuperTiles.LOWEST_PLANE(merge_ev.length);
......@@ -556,8 +788,11 @@ public class LinkPlanes {
if (planesFit(
planes[nsTile0][np0], // TilePlanes.PlaneData plane1, // should belong to the same supertile (or be converted for one)
planes[nsTile][np], // TilePlanes.PlaneData plane2,
merge_low_eigen, // false, // boolean merge_weak, // use for same supertile merge
merge_ev[np], // double merged_ev, // if NaN will calculate assuming the same supertile
merge_ev_eq[np], //double merged_ev_eq, // if NaN will calculate assuming the same supertile
merge_wev[np], // double merged_wev, // if NaN will calculate assuming the same supertile - for world
merge_wev_eq[np], // double merged_wev_eq, // if NaN will calculate assuming the same supertile - for world
prefix, // String prefix,
dl) // int debugLevel)
){
......@@ -577,6 +812,233 @@ public class LinkPlanes {
}
ImageDtt.startAndJoin(threads);
}
public void setNonExclusive(
final TilePlanes.PlaneData [][] planes,
final int debugLevel,
final int dbg_X,
final int dbg_Y)
{
final int tilesX = st.tileProcessor.getTilesX();
final int tilesY = st.tileProcessor.getTilesY();
final int superTileSize = st.tileProcessor.getSuperTileSize();
// final int tileSize = tileProcessor.getTileSize();
final int stilesX = (tilesX + superTileSize -1)/superTileSize;
final int stilesY = (tilesY + superTileSize -1)/superTileSize;
final int nStiles = stilesX * stilesY;
final double [] nan_plane = new double [superTileSize*superTileSize];
for (int i = 0; i < nan_plane.length; i++) nan_plane[i] = Double.NaN;
final int [][] dirsYX = {{-1, 0},{-1,1},{0,1},{1,1},{1,0},{1,-1},{0,-1},{-1,-1}};
// final int debug_stile = 20 * stilesX + 27;
// final int debug_stile = 17 * stilesX + 27;
// final int debug_stile = 9 * stilesX + 26;
final int debug_stile = dbg_Y * stilesX + dbg_X;
final Thread[] threads = ImageDtt.newThreadArray(st.tileProcessor.threadsMax);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
// TilePlanes.PlaneData [][] dbg_planes = planes;
for (int nsTile0 = ai.getAndIncrement(); nsTile0 < nStiles; nsTile0 = ai.getAndIncrement()) {
int sty0 = nsTile0 / stilesX;
int stx0 = nsTile0 % stilesX;
int dl = (nsTile0 == debug_stile) ? debugLevel:0;
if ( planes[nsTile0] != null) {
if (dl > 0){
System.out.println("setNonExclusive() nsTile0="+nsTile0);
}
for (int np0 = 0; np0 < planes[nsTile0].length; np0++) if (planes[nsTile0][np0] != null) {
TilePlanes.PlaneData merged_pd = planes[nsTile0][np0];
ArrayList<Point> neib_list = new ArrayList<Point>();
final double [][] merged_ev = planes[nsTile0][np0].getMergedValue();
for (int dir = 0; dir < 8; dir++) if (planes[nsTile0][np0].hasMergedValid(dir)){ //
int stx = stx0 + dirsYX[dir][1];
int sty = sty0 + dirsYX[dir][0];
int nsTile = sty * stilesX + stx; // from where to get
// find best individual connection among valid ones
boolean [] merged_valid = planes[nsTile0][np0].getMergedValid(dir);
// double [] merged_ev = ignore_weights? (planes[nsTile0][np0].getMergedValueEq(dir)):(planes[nsTile0][np0].getMergedValue(dir));
// double [] merged_ev =planes[nsTile0][np0].getMergedValue(dir);
int best_np = -1;
for (int np = 0; np < merged_valid.length; np++){
if (merged_valid[np] && ((best_np < 0) || (merged_ev[dir][np] < merged_ev[dir][best_np]))){
best_np = np;
}
}
if (best_np >=0) {
neib_list.add(new Point(dir, best_np));
}
}
Collections.sort(neib_list, new Comparator<Point>() {
@Override
public int compare(Point lhs, Point rhs) {
// -1 - less than, 1 - greater than, 0 - equal; ascending order (best connection first)
return (merged_ev[lhs.x][lhs.y] < merged_ev[rhs.x][rhs.y]) ? -1 : (merged_ev[lhs.x][lhs.y] > merged_ev[rhs.x][rhs.y]) ? 1 : 0;
}
});
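// neib_list is now ordered by ascending merged eigenvalue (best connection first); the loop below keeps
// only directions whose eigenvalue is within plCutTail of the best (but never below plMinTail), merges
// them into a composite plane, and backs out the last contributor (and stops) if the composite
// eigenvalue exceeds plMaxEigen.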
int [] nb = {-1,-1,-1,-1,-1,-1,-1,-1};
if (!neib_list.isEmpty()) {
double cut_value = merged_ev[neib_list.get(0).x][neib_list.get(0).y]*plCutTail;
if (cut_value < plMinTail) cut_value = plMinTail;
for (Point p: neib_list){
int dir = p.x;
int np = p.y;
if (merged_ev[dir][np] <= cut_value ){
int stx = stx0 + dirsYX[dir][1];
int sty = sty0 + dirsYX[dir][0];
int nsTile = sty * stilesX + stx; // from where to get
nb[dir] = np;
TilePlanes.PlaneData other_plane = planes[nsTile0][np0].getPlaneToThis(
planes[nsTile][np],
dl - 3); // debugLevel);
if (other_plane != null){
TilePlanes.PlaneData merged_pd_back = merged_pd.clone();
merged_pd = merged_pd.mergePlaneToThis(
other_plane, // PlaneData otherPd,
1.0, // double scale_other,
1.0, // double starWeightPwr, // Use this power of tile weight when calculating connection cost
false, // boolean ignore_weights,
true, // boolean sum_weights,
plPreferDisparity,
dl - 3); // int debugLevel)
if (merged_pd.getValue() > plMaxEigen){
nb[dir] = -1;
if (dl > -1){
String s = "[";
for (int i = 0; i < 8; i++){
s += (nb[i]>=0) ? nb[i]:"x";
if (i < 7) s += ", ";
}
s+="]";
System.out.println("setNonExclusive() nsTile0="+nsTile0+":"+np0+
" composite weighted plane value "+merged_pd.getValue()+
" exceeded plMaxEigen="+plMaxEigen+
". Removing last contributor: dir="+dir+", np="+np+
", remaining: "+s);
}
merged_pd = merged_pd_back.clone();
break;
}
}
}
}
merged_pd.getWorldXYZ(0); // debugLevel); // just to recalculate world data for debugging
merged_pd.setNeibBest(nb);
planes[nsTile0][np0].setNonexclusiveStar(merged_pd);
if (dl > 0){
String neib_str = "";
for (int dir = 0; dir < 8; dir++){
neib_str += (nb[dir]>=0)?nb[dir]:"x";
if (dir < 7) neib_str += ", ";
}
System.out.println("setNonExclusive() nsTile0="+nsTile0+":"+np0+
" weighted neighbors ["+neib_str+"], cutoff value = "+cut_value+
" merged value = "+merged_pd.getValue());
}
}
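// The same best-neighbor selection and tail cut is repeated below using the equal-weight (equalized)
// merged eigenvalues; the resulting composite plane is stored separately via setNonexclusiveStarEq().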
final double [][] merged_ev_eq = planes[nsTile0][np0].getMergedValueEq();
merged_pd = planes[nsTile0][np0];
neib_list = new ArrayList<Point>();
for (int dir = 0; dir < 8; dir++) if (planes[nsTile0][np0].hasMergedValid(dir)){ //
int stx = stx0 + dirsYX[dir][1];
int sty = sty0 + dirsYX[dir][0];
int nsTile = sty * stilesX + stx; // from where to get
// find best individual connection among valid ones
boolean [] merged_valid = planes[nsTile0][np0].getMergedValid(dir);
// double [] merged_ev = ignore_weights? (planes[nsTile0][np0].getMergedValueEq(dir)):(planes[nsTile0][np0].getMergedValue(dir));
int best_np = -1;
for (int np = 0; np < merged_valid.length; np++){
if (merged_valid[np] && ((best_np < 0) || (merged_ev_eq[dir][np] < merged_ev_eq[dir][best_np]))){
best_np = np;
}
}
if (best_np >=0) {
neib_list.add(new Point(dir, best_np));
}
}
Collections.sort(neib_list, new Comparator<Point>() {
@Override
public int compare(Point lhs, Point rhs) {
// -1 - less than, 1 - greater than, 0 - equal; ascending order (best connection first)
return (merged_ev_eq[lhs.x][lhs.y] < merged_ev_eq[rhs.x][rhs.y]) ? -1 : (merged_ev_eq[lhs.x][lhs.y] > merged_ev_eq[rhs.x][rhs.y]) ? 1 : 0;
}
});
int [] nb_eq = {-1,-1,-1,-1,-1,-1,-1,-1};
if (!neib_list.isEmpty()) {
double cut_value = merged_ev_eq[neib_list.get(0).x][neib_list.get(0).y]*plCutTail;
if (cut_value < plMinTail) cut_value = plMinTail;
for (Point p: neib_list){
int dir = p.x;
int np = p.y;
if (merged_ev_eq[dir][np] <= cut_value ){
int stx = stx0 + dirsYX[dir][1];
int sty = sty0 + dirsYX[dir][0];
int nsTile = sty * stilesX + stx; // from where to get
nb_eq[dir] = np;
TilePlanes.PlaneData other_plane = planes[nsTile0][np0].getPlaneToThis(
planes[nsTile][np],
dl - 3); // debugLevel);
TilePlanes.PlaneData merged_pd_back = merged_pd.clone();
if (other_plane != null){
merged_pd = merged_pd.mergePlaneToThis(
other_plane, // PlaneData otherPd,
1.0, // double scale_other,
1.0, // double starWeightPwr, // Use this power of tile weight when calculating connection cost
true, // boolean ignore_weights,
true, // boolean sum_weights,
plPreferDisparity,
dl - 3); // int debugLevel)
if (merged_pd.getValue() > plMaxEigen){
nb_eq[dir] = -1;
if (dl > -1){
String s = "[";
for (int i = 0; i < 8; i++){
s += (nb_eq[i]>=0) ? nb_eq[i]:"x";
if (i < 7) s += ", ";
}
s+="]";
System.out.println("setNonExclusive() nsTile0="+nsTile0+":"+np0+
" composite equalized plane value "+merged_pd.getValue()+
" exceeded plMaxEigen="+plMaxEigen+
". Removing last contributor: dir="+dir+", np="+np+
", remaining: "+s);
}
merged_pd = merged_pd_back.clone();
break;
}
}
}
}
merged_pd.getWorldXYZ(0); // debugLevel); // just to recalculate world data for debugging
merged_pd.setNeibBest(nb_eq);
planes[nsTile0][np0].setNonexclusiveStarEq(merged_pd);
if (dl > 0){
String neib_str = "";
for (int dir = 0; dir < 8; dir++){
neib_str += (nb_eq[dir]>=0)?nb_eq[dir]:"x";
if (dir < 7) neib_str += ", ";
}
System.out.println("setNonExclusive() nsTile0="+nsTile0+":"+np0+
" equalized neighbors ["+neib_str+"], cutoff value = "+cut_value+
" merged value = "+merged_pd.getValue());
}
}
}
}
}
}
};
}
ImageDtt.startAndJoin(threads);
}
/**
* Find mutual links between multi-layer planes for supertiles. Requires that the smallest eigenvalues are calculated for each plane
......@@ -608,7 +1070,7 @@ public class LinkPlanes {
final AtomicInteger ai = new AtomicInteger(0);
final AtomicInteger ai_numThread = new AtomicInteger(0);
final int numThreads = threads.length;
final double [][][] all_quality_stats = new double [numThreads][2][4]; // contributions of all [0] and winners [2], 4 parameters
final double [][][] all_quality_stats = new double [numThreads][2][6]; // contributions of all [0] and winners [2], 6 parameters
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
......@@ -670,6 +1132,8 @@ public class LinkPlanes {
for (int np0 = np0_min; np0 < this_matched.length; np0++) if (planes[nsTile0][np0] != null){
double [] merge_ev = planes[nsTile0][np0].getMergedValue(dir);
double [] merge_ev_eq = planes[nsTile0][np0].getMergedValueEq(dir);
double [] merge_wev = planes[nsTile0][np0].getMergedWValue(dir);
double [] merge_wev_eq = planes[nsTile0][np0].getMergedWValueEq(dir);
boolean [] merge_valid = planes[nsTile0][np0].getMergedValid(dir);
qualities[np0] = new double[ merge_ev.length][];
......@@ -680,8 +1144,10 @@ public class LinkPlanes {
qualities[np0][np] = getFitQualities( // {this_rq, this_rq_eq};
planes[nsTile0][np0], //TilePlanes.PlaneData plane1, // should belong to the same supertile (or be converted for one)
planes[nsTile][np], //TilePlanes.PlaneData plane2,
merge_ev[np], // double merged_ev, // if NaN will calculate assuming the same supertile
merge_ev_eq[np], // double merged_ev_eq, // if NaN will calculate assuming the same supertile
merge_ev[np], // double merged_ev, // if NaN will calculate assuming the same supertile
merge_ev_eq[np], // double merged_ev_eq, // if NaN will calculate assuming the same supertile
merge_wev[np], // double merged_wev, // if NaN will calculate assuming the same supertile - for world
merge_wev_eq[np], // double merged_wev_eq, // if NaN will calculate assuming the same supertile - for world
prefix, // String prefix,
dl); // int debugLevel)
if (qualities != null) {
......@@ -775,7 +1241,7 @@ public class LinkPlanes {
};
}
ImageDtt.startAndJoin(threads);
double [][] quality_stats = new double [2][4]; // contributions of all [0] and winners [2], 4 parameters
double [][] quality_stats = new double [2][6]; // contributions of all [0] and winners [2], 6 parameters
for (int n = 0; n < all_quality_stats.length; n++){
for (int i = 0; i < all_quality_stats[n].length; i++){
for (int j = 0; j < all_quality_stats[n][i].length; j++){
......@@ -797,10 +1263,12 @@ public class LinkPlanes {
}
if (debugLevel > -1){
System.out.println("Contribution of various factors for all considered pairs and the winners:");
System.out.println(" weighted quality: "+quality_stats[0][0]+" (all), "+quality_stats[1][0]+" (winners)");
System.out.println("non-weighted quality: "+quality_stats[0][1]+" (all), "+quality_stats[1][1]+" (winners)");
System.out.println(" sin2: "+quality_stats[0][2]+" (all), "+quality_stats[1][2]+" (winners)");
System.out.println(" rdist2: "+quality_stats[0][3]+" (all), "+quality_stats[1][3]+" (winners)");
System.out.println(" weighted quality (disp) : "+quality_stats[0][0]+" (all), "+quality_stats[1][0]+" (winners)");
System.out.println("non-weighted quality (disp) : "+quality_stats[0][1]+" (all), "+quality_stats[1][1]+" (winners)");
System.out.println(" weighted quality (world): "+quality_stats[0][2]+" (all), "+quality_stats[1][2]+" (winners)");
System.out.println("non-weighted quality (world): "+quality_stats[0][3]+" (all), "+quality_stats[1][3]+" (winners)");
System.out.println(" sin2: "+quality_stats[0][4]+" (all), "+quality_stats[1][4]+" (winners)");
System.out.println(" rdist2: "+quality_stats[0][5]+" (all), "+quality_stats[1][5]+" (winners)");
}
return quality_stats;
......@@ -1092,13 +1560,30 @@ public class LinkPlanes {
" overlap1="+ ((int) (100 *overlaps[0]))+"% "+
" overlap2="+ ((int) (100 *overlaps[1]))+"% ");
}
} else {
if (debugLevel > 0){
System.out.println("overlapSameTileCandidates(): REJECTED pair nsTile="+nsTile+":"+np1+":"+np2+
" as it has HIGH overlap: "+
// maybe one of the planes is very weak and they are close by disparity?
// planes[nsTile][np1]
if (areWeakSimilar(
planes[nsTile][np1], // TilePlanes.PlaneData plane1, // should belong to the same supertile (or be converted for one)
planes[nsTile][np2], // TilePlanes.PlaneData plane2,
Double.NaN, // double merged_ev_eq, // if NaN will calculate assuming the same supertile
"overlapSameTileCandidates() "+nsTile+":"+np1+":"+np2, // String prefix,
dl)// int debugLevel)
){
overlap_merge_candidates[nsTile][np1][np2] = true;
overlap_merge_candidates[nsTile][np2][np1] = true;
System.out.println("overlapSameTileCandidates(): ACCEPTED pair nsTile="+nsTile+":"+np1+":"+np2+
" even as it has HIGH overlap: "+
" overlap1="+ ((int) (100 *overlaps[0]))+"% "+
" overlap2="+ ((int) (100 *overlaps[1]))+"% ");
" overlap2="+ ((int) (100 *overlaps[1]))+"% "+
"because at least one plane is weak and they have small disparity difference");
} else {
if (debugLevel > 0){
System.out.println("overlapSameTileCandidates(): REJECTED pair nsTile="+nsTile+":"+np1+":"+np2+
" as it has HIGH overlap: "+
" overlap1="+ ((int) (100 *overlaps[0]))+"% "+
" overlap2="+ ((int) (100 *overlaps[1]))+"% ");
}
}
}
}
......@@ -1111,6 +1596,446 @@ public class LinkPlanes {
return overlap_merge_candidates;
}
// verify that after merging a pair, the composite plane still has valid connections in each direction where the pair had them
public boolean [][][] keepSameTileConnections(
final TilePlanes.PlaneData [][] planes,
final int [][][] merge_candidates,
final boolean [][][] valid_candidates, // will be updated
final boolean merge_low_eigen,
final int debugLevel,
final int dbg_X,
final int dbg_Y)
{
final int tilesX = st.tileProcessor.getTilesX();
final int tilesY = st.tileProcessor.getTilesY();
final int superTileSize = st.tileProcessor.getSuperTileSize();
// final int tileSize = tileProcessor.getTileSize();
final int stilesX = (tilesX + superTileSize -1)/superTileSize;
final int stilesY = (tilesY + superTileSize -1)/superTileSize;
final int nStiles = stilesX * stilesY;
final int [][] dirsYX = {{-1, 0},{-1,1},{0,1},{1,1},{1,0},{1,-1},{0,-1},{-1,-1}};
// final boolean [][] merge_pairs = new boolean [nStiles][];
// final boolean [][][] overlap_merge_candidates = new boolean [nStiles][][];
final int debug_stile = dbg_Y * stilesX + dbg_X;
final Thread[] threads = ImageDtt.newThreadArray((debugLevel > 1)? 1 : st.tileProcessor.threadsMax);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nsTile0 = ai.getAndIncrement(); nsTile0 < nStiles; nsTile0 = ai.getAndIncrement()) if ( merge_candidates[nsTile0] != null) {
int sty0 = nsTile0 / stilesX;
int stx0 = nsTile0 % stilesX;
int dl = ((debugLevel > 0) && (nsTile0 == debug_stile)) ? 3: ((debugLevel > 1) ? 1:0);
if (dl > 1){
System.out.println("overlapSameTileCandidates(): nsTile="+nsTile0);
}
// int n_planes = planes[nsTile0].length;
// overlap_merge_candidates[nsTile] = new boolean [n_planes][n_planes];
// get original directions
for (int np1 = 0; np1 < planes[nsTile0].length; np1++) if (planes[nsTile0][np1] != null){
for (int np2 = np1 + 1; np2 < planes[nsTile0].length; np2++) if (planes[nsTile0][np2] != null){
if (valid_candidates[nsTile0][np1][np2]) { // only check pair considered valid
boolean [] old_valid = new boolean[8];
for (int dir = 0; dir < 8; dir++){
old_valid[dir] = planes[nsTile0][np1].hasMergedValid(dir) || planes[nsTile0][np2].hasMergedValid(dir);
}
// should be merged same way as later actually. Does it need to be recalculated from the original tiles?
TilePlanes.PlaneData merged_pd = planes[nsTile0][np1].mergePlaneToThis(
planes[nsTile0][np2], // PlaneData otherPd,
1.0, // double scale_other,
1.0, // double starWeightPwr, // Use this power of tile weight when calculating connection cost
false, // boolean ignore_weights,
true, // boolean sum_weights,
plPreferDisparity,
debugLevel - 2); // int debugLevel)
// is the merge too bad already?
double corr_max_eigen = corrMaxEigen(
plMaxEigen,
plDispNorm,
merged_pd);
if ((plMaxEigen != 0.0) &&
(merged_pd.getValue() > corr_max_eigen)){
valid_candidates[nsTile0][np1][np2] = false;
valid_candidates[nsTile0][np2][np1] = false;
if (debugLevel > 0){
System.out.println("keepSameTileConnections(): REMOVING pair nsTile0="+nsTile0+":"+np1+":"+np2+
" as the merge would have high eigenvalue = "+merged_pd.getValue()+" > " + corr_max_eigen);
}
continue; // to the next pair
}
// now verify that the merged plane can be connected in each of the original directions
ArrayList<Integer> debug_dirs_list = new ArrayList<Integer>();
for (int dir = 0; dir < 8; dir++) if (old_valid[dir]){ //
int stx = stx0 + dirsYX[dir][1];
int sty = sty0 + dirsYX[dir][0];
int nsTile = sty * stilesX + stx; // from where to get
boolean fit = false;
for (int np = 0; np < planes[nsTile].length; np++){
if (planes[nsTile][np] != null){
String prefix = "keepSameTileConnections() nsTile0="+nsTile0+":"+np1+":"+np2+" dir="+dir+" nsTile="+nsTile+" np="+np;
TilePlanes.PlaneData other_plane = merged_pd.getPlaneToThis(
planes[nsTile][np],
dl-3); // debugLevel);
if (planesFit(
merged_pd, // TilePlanes.PlaneData plane1, // should belong to the same supertile (or be converted for one)
other_plane, // TilePlanes.PlaneData plane2,
merge_low_eigen, // false, // boolean merge_weak, // use for same supertile merge
Double.NaN, // double merged_ev, // if NaN will calculate assuming the same supertile
Double.NaN, //double merged_ev_eq, // if NaN will calculate assuming the same supertile
Double.NaN, // double merged_wev, // if NaN will calculate assuming the same supertile - for world
Double.NaN, // double merged_wev_eq, // if NaN will calculate assuming the same supertile - for world
prefix, // String prefix,
dl-1)){ // int debugLevel)
fit = true;
break;
}
}
}
if (!fit){
valid_candidates[nsTile0][np1][np2] = false;
valid_candidates[nsTile0][np2][np1] = false;
if (debugLevel > 0){
debug_dirs_list.add(dir);
}
if (debugLevel < 2){
break; // no need to check other directions; keep going only to collect debug info
}
}
}
if (debugLevel > 0){
if (!debug_dirs_list.isEmpty()){
System.out.println("keepSameTileConnections(): REMOVING pair nsTile0="+nsTile0+":"+np1+":"+np2+
" as the merge would break previous connection in directions "+ debug_dirs_list);
} else {
System.out.println("keepSameTileConnections(): KEEPING pair nsTile0="+nsTile0+":"+np1+":"+np2+
" as the merge would keep connection in each of the previously connected directions");
}
}
}
}
}
}
}
};
}
ImageDtt.startAndJoin(threads);
return valid_candidates;
}
public void costSameTileConnections(
final boolean ignore_weights,
final double threshold_worst,
final double threshold_world_worst,
final TilePlanes.PlaneData [][] planes,
final int [][][] merge_candidates,
final boolean [][][] valid_candidates, // will be updated
// final boolean merge_low_eigen,
final int debugLevel,
final int dbg_X,
final int dbg_Y)
{
final int tilesX = st.tileProcessor.getTilesX();
final int tilesY = st.tileProcessor.getTilesY();
final int superTileSize = st.tileProcessor.getSuperTileSize();
// final int tileSize = tileProcessor.getTileSize();
final int stilesX = (tilesX + superTileSize -1)/superTileSize;
final int stilesY = (tilesY + superTileSize -1)/superTileSize;
final int nStiles = stilesX * stilesY;
final int [][] dirsYX = {{-1, 0},{-1,1},{0,1},{1,1},{1,0},{1,-1},{0,-1},{-1,-1}};
// final boolean [][] merge_pairs = new boolean [nStiles][];
final double [][][][][][] merged_neib_ev = new double [nStiles][][][][][];
final int debug_stile = dbg_Y * stilesX + dbg_X;
final Thread[] threads = ImageDtt.newThreadArray((debugLevel > 1)? 1 : st.tileProcessor.threadsMax);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nsTile0 = ai.getAndIncrement(); nsTile0 < nStiles; nsTile0 = ai.getAndIncrement()) if ( merge_candidates[nsTile0] != null) {
int sty0 = nsTile0 / stilesX;
int stx0 = nsTile0 % stilesX;
int dl = ((debugLevel > 0) && (nsTile0 == debug_stile)) ? 3: ((debugLevel > 1) ? 1:0);
if (dl > 1){
System.out.println("costSameTileConnections(): nsTile="+nsTile0);
}
int n_planes = planes[nsTile0].length;
// overlap_merge_candidates[nsTile] = new boolean [n_planes][n_planes];
merged_neib_ev[nsTile0] = new double [n_planes][n_planes][4][][];
// get original directions
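// For each valid candidate pair, compare the two planes through their precomputed
// "non-exclusive star" composites (weighted and equal-weight versions); the pair is
// kept only if planesFit() passes for both versions, otherwise it is invalidated.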
for (int np1 = 0; np1 < planes[nsTile0].length; np1++) if (planes[nsTile0][np1] != null){
for (int np2 = np1 + 1; np2 < planes[nsTile0].length; np2++) if (planes[nsTile0][np2] != null){
if (valid_candidates[nsTile0][np1][np2]) { // only check pair considered valid
String prefix = "costSameTileConnections() fit weighted: nsTile0="+nsTile0+" np1="+np1+" np2="+np2;
boolean fit1 = planesFit(
planes[nsTile0][np1].getNonexclusiveStar(), // TilePlanes.PlaneData plane1, // should belong to the same supertile (or be converted for one)
planes[nsTile0][np2].getNonexclusiveStar(), // TilePlanes.PlaneData plane2,
true, // boolean merge_weak, // use for same supertile merge
Double.NaN, // double merged_ev, // if NaN will calculate assuming the same supertile
Double.NaN, // double merged_ev_eq, // if NaN will calculate assuming the same supertile
Double.NaN, // double merged_wev, // if NaN will calculate assuming the same supertile - for world
Double.NaN, // double merged_wev_eq, // if NaN will calculate assuming the same supertile - for world
prefix, // String prefix,
dl-1); // int debugLevel)
prefix = "costSameTileConnections() fit equal weight: nsTile0="+nsTile0+" np1="+np1+" np2="+np2;
boolean fit2 = planesFit(
planes[nsTile0][np1].getNonexclusiveStarEq(), // TilePlanes.PlaneData plane1, // should belong to the same supertile (or be converted for one)
planes[nsTile0][np2].getNonexclusiveStarEq(), // TilePlanes.PlaneData plane2,
true, // boolean merge_weak, // use for same supertile merge
Double.NaN, // double merged_ev, // if NaN will calculate assuming the same supertile
Double.NaN, // double merged_ev_eq, // if NaN will calculate assuming the same supertile
Double.NaN, // double merged_wev, // if NaN will calculate assuming the same supertile - for world
Double.NaN, // double merged_wev_eq, // if NaN will calculate assuming the same supertile - for world
prefix, // String prefix,
dl-1); // int debugLevel)
if (!fit1 || !fit2){
valid_candidates[nsTile0][np1][np2] = false;
valid_candidates[nsTile0][np2][np1] = false;
if (dl > -1){
System.out.println("costSameTileConnections(): nsTile="+nsTile0+":"+np1+":"+np2+
" REMOVING PAIR, fit1="+fit1+" fit2="+fit2);
}
} else {
if (dl > -1){
System.out.println("costSameTileConnections(): nsTile="+nsTile0+":"+np1+":"+np2+
" KEEPING PAIR, fit1="+fit1+" fit2="+fit2);
}
}
/// final double threshold_worst,
// final double threshold_world_worst,
}
}
}
}
}
};
}
ImageDtt.startAndJoin(threads);
// return merged_neib_ev;
}
public double [][][][][][] costSameTileConnectionsOld(
final boolean ignore_weights,
final double threshold_worst,
final double threshold_world_worst,
final TilePlanes.PlaneData [][] planes,
final int [][][] merge_candidates,
final boolean [][][] valid_candidates, // will be updated
// final boolean merge_low_eigen,
final int debugLevel,
final int dbg_X,
final int dbg_Y)
{
final int tilesX = st.tileProcessor.getTilesX();
final int tilesY = st.tileProcessor.getTilesY();
final int superTileSize = st.tileProcessor.getSuperTileSize();
// final int tileSize = tileProcessor.getTileSize();
final int stilesX = (tilesX + superTileSize -1)/superTileSize;
final int stilesY = (tilesY + superTileSize -1)/superTileSize;
final int nStiles = stilesX * stilesY;
final int [][] dirsYX = {{-1, 0},{-1,1},{0,1},{1,1},{1,0},{1,-1},{0,-1},{-1,-1}};
// final boolean [][] merge_pairs = new boolean [nStiles][];
final double [][][][][][] merged_neib_ev = new double [nStiles][][][][][];
final int debug_stile = dbg_Y * stilesX + dbg_X;
final Thread[] threads = ImageDtt.newThreadArray((debugLevel > 1)? 1 : st.tileProcessor.threadsMax);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nsTile0 = ai.getAndIncrement(); nsTile0 < nStiles; nsTile0 = ai.getAndIncrement()) if ( merge_candidates[nsTile0] != null) {
int sty0 = nsTile0 / stilesX;
int stx0 = nsTile0 % stilesX;
int dl = ((debugLevel > 0) && (nsTile0 == debug_stile)) ? 3: ((debugLevel > 1) ? 1:0);
if (dl > 1){
System.out.println("costSameTileConnections(): nsTile="+nsTile0);
}
int n_planes = planes[nsTile0].length;
// overlap_merge_candidates[nsTile] = new boolean [n_planes][n_planes];
merged_neib_ev[nsTile0] = new double [n_planes][n_planes][4][][];
// get original directions
for (int np1 = 0; np1 < planes[nsTile0].length; np1++) if (planes[nsTile0][np1] != null){
for (int np2 = np1 + 1; np2 < planes[nsTile0].length; np2++) if (planes[nsTile0][np2] != null){
if (valid_candidates[nsTile0][np1][np2]) { // only check pair considered valid
boolean [] old_valid = new boolean[8];
for (int dir = 0; dir < 8; dir++){
old_valid[dir] = planes[nsTile0][np1].hasMergedValid(dir) || planes[nsTile0][np2].hasMergedValid(dir);
}
// should be merged the same way as it is later. Does it need to be recalculated from the original tiles?
TilePlanes.PlaneData this_plane = planes[nsTile0][np1].mergePlaneToThis(
planes[nsTile0][np2], // PlaneData otherPd,
1.0, // double scale_other,
1.0, // double starWeightPwr, // Use this power of tile weight when calculating connection cost
ignore_weights, // boolean ignore_weights,
true, // boolean sum_weights,
plPreferDisparity,
dl - 3); // int debugLevel)
// is the merge too bad already - should be already tested in keepSameTileConnections()
// now for each of the valid directions calculate similar to matchPlanes(), but in all 8 directions
double [][] merged_ev = new double [8][];
double [][] merged_ev_eq = new double [8][];
double [][] merged_wev = new double [8][];
double [][] merged_wev_eq = new double [8][];
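// Per-direction, per-neighbor-plane eigenvalues of merging the tentative (np1+np2) plane with each
// neighbor plane: weighted (merged_ev, merged_wev) and equal-weight ("_eq") versions, in disparity
// space (merged_ev*) and in world space (merged_wev*).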
for (int dir = 0; dir < 8; dir++) if (old_valid[dir]){ // all 8 here
int stx = stx0 + dirsYX[dir][1];
int sty = sty0 + dirsYX[dir][0];
// if ((sty < stilesY) && (sty > 0) && (stx < 0)) {
if ((stx < stilesX) && (sty < stilesY) && (sty > 0)) {
int nsTile = sty * stilesX + stx; // from where to get
if (nsTile >= planes.length){
System.out.println("BUG!!!!");
} else {
TilePlanes.PlaneData [] other_planes = planes[nsTile];
if (other_planes != null) {
merged_ev[dir] = new double [other_planes.length];
merged_ev_eq[dir] = new double [other_planes.length];
merged_wev[dir] = new double [other_planes.length];
merged_wev_eq[dir] = new double [other_planes.length];
// this_plane.initMergedValue(dir,other_planes.length); // filled with NaN
for (int np = 0; np < other_planes.length; np ++){
if (other_planes[np] != null) {
TilePlanes.PlaneData other_plane = this_plane.getPlaneToThis(
other_planes[np],
dl - 3); // debugLevel);
if (other_plane !=null) { // now always, but may add later
TilePlanes.PlaneData merged_pd = this_plane.mergePlaneToThis(
other_plane, // PlaneData otherPd,
1.0, // double scale_other,
1.0, // double starWeightPwr, // Use this power of tile weight when calculating connection cost
false, // boolean ignore_weights,
true, // boolean sum_weights,
plPreferDisparity,
dl - 3); // int debugLevel)
if (merged_pd !=null) { // now always, but may add later
merged_ev[dir][np] = merged_pd.getValue(); // smallest eigenValue
merged_wev[dir][np] = merged_pd.getWValue(); // smallest eigenValue
if (Double.isNaN(merged_ev[dir][np]) || Double.isNaN(merged_wev[dir][np]) ){
System.out.println("costSameTileConnections(): nsTile="+nsTile0+":"+np1+":"+np2+" NaN");
}
}
merged_pd = this_plane.mergePlaneToThis(
other_plane, // PlaneData otherPd,
1.0, // double scale_other,
1.0, // double starWeightPwr, // Use this power of tile weight when calculating connection cost
true, // false, // boolean ignore_weights,
true, // boolean sum_weights,
plPreferDisparity,
dl - 3); // int debugLevel)
if (merged_pd !=null) { // now always, but may add later
merged_ev_eq[dir][np] = merged_pd.getValue(); // smallest eigenValue
merged_wev_eq[dir][np] = merged_pd.getWValue(); // smallest eigenValue
if (Double.isNaN(merged_ev_eq[dir][np]) || Double.isNaN(merged_wev_eq[dir][np]) ){
System.out.println("costSameTileConnections(): nsTile="+nsTile0+":"+np1+":"+np2+" NaN2");
}
}
}
}
}
}
}
}
}
merged_neib_ev[nsTile0][np1][np2][0] = merged_ev;
merged_neib_ev[nsTile0][np1][np2][1] = merged_ev_eq;
merged_neib_ev[nsTile0][np1][np2][2] = merged_wev;
merged_neib_ev[nsTile0][np1][np2][3] = merged_wev_eq;
// calculate here, later move to a separate method
double [] weighted_costs = new double[8]; // costs of the first plane through its connections (star-weighted), of the second plane, then of the composite plane with the same neighbors
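// Index layout (as used below): [0]/[1] - current cost of plane np1/np2 through its best neighbors
// (equal-weight, disparity space), [2]/[3] - same in world space, [4]/[5] - cost of the tentative
// merged plane through the same neighbors (disparity space), [6]/[7] - same in world space.
// Each sum is weighted by the neighbor plane's star weight.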
double sw1 = 0.0, sw2 = 0.0;
for (int dir = 0; dir < 8; dir++) { // all 8 here
int stx = stx0 + dirsYX[dir][1];
int sty = sty0 + dirsYX[dir][0];
if ((stx < stilesX) && (sty < stilesY) && (sty > 0)) {
int nsTile = sty * stilesX + stx; // from where to get
int nnp1 = planes[nsTile0][np1].getNeibBest(dir);
if (nnp1 >= 0){
double sw = planes[nsTile][nnp1].getStarValueWeight()[1];
weighted_costs[0] += sw * planes[nsTile0][np1].getMergedValueEq(dir, nnp1);
weighted_costs[4] += sw * merged_ev_eq[dir][nnp1];
weighted_costs[2] += sw * planes[nsTile0][np1].getMergedWValueEq(dir, nnp1);
weighted_costs[6] += sw * merged_wev_eq[dir][nnp1];
sw1 += sw;
if (Double.isNaN(weighted_costs[0]) || Double.isNaN(weighted_costs[2]) ){
System.out.println("costSameTileConnections(): nsTile="+nsTile0+":"+np1+":"+np2+" NaN3");
}
}
int nnp2 = planes[nsTile0][np2].getNeibBest(dir);
if (nnp2 >= 0){
double sw = planes[nsTile][nnp2].getStarValueWeight()[1];
weighted_costs[1] += sw * planes[nsTile0][np2].getMergedValueEq(dir, nnp2);
weighted_costs[5] += sw * merged_ev_eq[dir][nnp2];
weighted_costs[3] += sw * planes[nsTile0][np2].getMergedWValueEq(dir, nnp2);
weighted_costs[7] += sw * merged_wev_eq[dir][nnp2];
sw2 += sw;
if (Double.isNaN(weighted_costs[1]) || Double.isNaN(weighted_costs[3]) ){
System.out.println("costSameTileConnections(): nsTile="+nsTile0+":"+np1+":"+np2+" NaN3");
}
}
}
}
if ((sw1 > 0.0) && (sw2 > 0.0)) {
weighted_costs[0] /= sw1;
weighted_costs[2] /= sw1;
weighted_costs[4] /= sw1;
weighted_costs[6] /= sw1;
weighted_costs[1] /= sw2;
weighted_costs[3] /= sw2;
weighted_costs[5] /= sw2;
weighted_costs[7] /= sw2;
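// k* are the ratios of the normalized cost after the tentative merge to the cost before it,
// for each of the two planes, in disparity space (k1, k2) and in world space (k1w, k2w).
// If the worse ratio exceeds threshold_worst (or threshold_world_worst for world space),
// merging the pair would degrade the existing connections too much and the pair is dropped.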
double k1 = weighted_costs[4]/weighted_costs[0];
double k2 = weighted_costs[5]/weighted_costs[1];
double k1w = weighted_costs[6]/weighted_costs[2];
double k2w = weighted_costs[7]/weighted_costs[3];
double worst_k = (k1 > k2)? k1: k2;
double worst_kw = (k1w > k2w)? k1w: k2w;
if (dl > -1){
System.out.println("costSameTileConnections(): nsTile="+nsTile0+":"+np1+":"+np2+
" worst_k="+worst_k+", sum="+(k1+k2)+", k1 = "+k1+", k2 = "+k2+
" weighted costs = ["+weighted_costs[0]+", "+weighted_costs[1]+", "+weighted_costs[4]+", "+weighted_costs[5]+"]");
System.out.println("costSameTileConnections(): nsTile="+nsTile0+":"+np1+":"+np2+
" worst_kw="+worst_kw+", sum="+(k1w+k2w)+", k1w = "+k1w+", k2w = "+k2w+
" weighted costs = ["+weighted_costs[2]+", "+weighted_costs[3]+", "+weighted_costs[6]+", "+weighted_costs[7]+"]");
}
if ((worst_k > threshold_worst) || (worst_kw > threshold_world_worst)){
valid_candidates[nsTile0][np1][np2] = false;
valid_candidates[nsTile0][np2][np1] = false;
if (dl > -1){
System.out.println("costSameTileConnections(): nsTile="+nsTile0+":"+np1+":"+np2+
" REMOVING PAIR");
}
}
/// final double threshold_worst,
// final double threshold_world_worst,
} else {
if (dl > -1){
System.out.println("costSameTileConnections(): nsTile="+nsTile0+":"+np1+":"+np2+
" one of the tiles was not connected");
}
}
}
}
}
}
}
};
}
ImageDtt.startAndJoin(threads);
return merged_neib_ev;
}
public boolean [][] mergeSameTileEvaluate(
final TilePlanes.PlaneData [][] planes,
final int [][][] merge_candidates,
......@@ -1148,8 +2073,11 @@ public class LinkPlanes {
if (planesFit(
planes[nsTile][np1], // TilePlanes.PlaneData plane1, // should belong to the same supertile (or be converted for one)
planes[nsTile][np2], // TilePlanes.PlaneData plane2,
Double.NaN, // calculate double merged_ev, // if NaN will calculate assuming the same supertile
Double.NaN, // calculate double merged_ev_eq, // if NaN will calculate assuming the same supertile
true, // boolean merge_weak, // use for same supertile merge
Double.NaN, // double merged_ev, // if NaN will calculate assuming the same supertile
Double.NaN, // double merged_ev_eq, // if NaN will calculate assuming the same supertile
Double.NaN, // double merged_wev, // if NaN will calculate assuming the same supertile - for world
Double.NaN, // double merged_wev_eq, // if NaN will calculate assuming the same supertile - for world
prefix, // String prefix,
dl) // int debugLevel)
){
......@@ -1186,7 +2114,7 @@ public class LinkPlanes {
final AtomicInteger ai = new AtomicInteger(0);
final int quality_index = 3; // 0 - using strengths, 1 - equal strengths, 2 - composite
// TODO: Make no-overlaps be overridden if one of the planes is very weak and they are close by disparity
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
......@@ -1207,8 +2135,11 @@ public class LinkPlanes {
if (planesFit(
planes[nsTile][np1], // TilePlanes.PlaneData plane1, // should belong to the same supertile (or be converted for one)
planes[nsTile][np2], // TilePlanes.PlaneData plane2,
Double.NaN, // calculate double merged_ev, // if NaN will calculate assuming the same supertile
Double.NaN, // calculate double merged_ev_eq, // if NaN will calculate assuming the same supertile
true, // boolean merge_weak, // use for same supertile merge
Double.NaN, // double merged_ev, // if NaN will calculate assuming the same supertile
Double.NaN, // double merged_ev_eq, // if NaN will calculate assuming the same supertile
Double.NaN, // double merged_wev, // if NaN will calculate assuming the same supertile - for world
Double.NaN, // double merged_wev_eq, // if NaN will calculate assuming the same supertile - for world
prefix, // String prefix,
dl) // int debugLevel)
){
......@@ -1285,8 +2216,10 @@ public class LinkPlanes {
double [] qualities = getFitQualities( // {this_rq, this_rq_eq};
planes[nsTile][np1], //TilePlanes.PlaneData plane1, // should belong to the same supertile (or be converted for one)
planes[nsTile][np2], //TilePlanes.PlaneData plane2,
Double.NaN, // double merged_ev, // if NaN will calculate assuming the same supertile
Double.NaN, // double merged_ev_eq, // if NaN will calculate assuming the same supertile
Double.NaN, // double merged_ev, // if NaN will calculate assuming the same supertile
Double.NaN, // double merged_ev_eq, // if NaN will calculate assuming the same supertile
Double.NaN, // double merged_wev, // if NaN will calculate assuming the same supertile - for world
Double.NaN, // double merged_wev_eq, // if NaN will calculate assuming the same supertile - for world
prefix, // String prefix,
dl); // int debugLevel)
if (qualities != null) {
......@@ -1337,8 +2270,10 @@ public class LinkPlanes {
double [] qualities = getFitQualities( // {this_rq, this_rq_eq};
merged_pd, //TilePlanes.PlaneData plane1, // should belong to the same supertile (or be converted for one)
planes[nsTile][np], //TilePlanes.PlaneData plane2,
Double.NaN, // double merged_ev, // if NaN will calculate assuming the same supertile
Double.NaN, // double merged_ev_eq, // if NaN will calculate assuming the same supertile
Double.NaN, // double merged_ev, // if NaN will calculate assuming the same supertile
Double.NaN, // double merged_ev_eq, // if NaN will calculate assuming the same supertile
Double.NaN, // double merged_wev, // if NaN will calculate assuming the same supertile - for world
Double.NaN, // double merged_wev_eq, // if NaN will calculate assuming the same supertile - for world
prefix, // String prefix,
dl); // int debugLevel)
if (qualities != null) {
......@@ -1471,4 +2406,121 @@ public class LinkPlanes {
return rquality;
}
public void calcStarValueStrength(
final boolean set_start_planes,
final double orthoWeight,
final double diagonalWeight,
final double starPwr, // Divide cost by number of connections to this power
final double starWeightPwr, // Use this power of tile weight when calculating connection cost
final double weightToDens, // Balance weighted density against density. 0.0 - density, 1.0 - weighted density
final double starValPwr, // Raise value of each tile before averaging
final int steps,
final TilePlanes.PlaneData [][] planes,
final boolean preferDisparity,
final int debugLevel)
{
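// Computes the per-plane "star" value/weight by evaluating each supertile in isolation through
// ConnectionCosts.initConnectionCosts() and storing the result with setStarValueWeight();
// the set_start_planes flag is simply forwarded to initConnectionCosts() (presumably to also
// attach the composite star planes to the individual planes).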
final int tilesX = st.tileProcessor.getTilesX();
final int tilesY = st.tileProcessor.getTilesY();
final int superTileSize = st.tileProcessor.getSuperTileSize();
// final int tileSize = tileProcessor.getTileSize();
final int stilesX = (tilesX + superTileSize -1)/superTileSize;
final int stilesY = (tilesY + superTileSize -1)/superTileSize;
final int nStiles = stilesX * stilesY;
final TileSurface.TileNeibs tnSurface = st.tileSurface.new TileNeibs(stilesX, stilesY);
final Thread[] threads = ImageDtt.newThreadArray(st.tileProcessor.threadsMax);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
ConnectionCosts connectionCosts = new ConnectionCosts(
orthoWeight, // double orthoWeight,
diagonalWeight, // double diagonalWeight,
starPwr, // double starPwr, // Divide cost by number of connections to this power
starWeightPwr, // double starWeightPwr, // Use this power of tile weight when calculating connection cost
weightToDens, // Balance weighted density against density. 0.0 - density, 1.0 - weighted density
starValPwr, //double starValPwr, // Raise value of each tile before averaging
steps, // int steps,
planes, // TilePlanes.PlaneData [][] planes,
tnSurface, // TileSurface.TileNeibs tnSurface,
preferDisparity); // boolean preferDisparity)
int [] mod_supertiles = new int[1];
for (int nsTile = ai.getAndIncrement(); nsTile < nStiles; nsTile = ai.getAndIncrement()) {
if ( planes[nsTile] != null) {
mod_supertiles[0] = nsTile;
connectionCosts.initConnectionCosts(
set_start_planes,
mod_supertiles,
debugLevel);
double [][][] val_weights = connectionCosts.getValWeights();
for (int np = 0; np < planes[nsTile].length; np++){ // nu
if (planes[nsTile][np] != null) {
planes[nsTile][np].setStarValueWeight(val_weights[0][np]);
}
}
}
}
}
};
}
ImageDtt.startAndJoin(threads);
}
public void updateStarValueStrength(
final int [] mod_supertiles,
final double orthoWeight,
final double diagonalWeight,
final double starPwr, // Divide cost by number of connections to this power
final double starWeightPwr, // Use this power of tile weight when calculating connection cost
final double weightToDens, // Balance weighted density against density. 0.0 - density, 1.0 - weighted density
final double starValPwr, // Raise value of each tile before averaging
final int steps,
final TilePlanes.PlaneData [][] planes,
final boolean preferDisparity,
final int debugLevel)
{
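// Same as calcStarValueStrength(), but recalculates the star value/weight only for the supertiles
// listed in mod_supertiles - typically the tiles whose neighbor links were just modified.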
final int tilesX = st.tileProcessor.getTilesX();
final int tilesY = st.tileProcessor.getTilesY();
final int superTileSize = st.tileProcessor.getSuperTileSize();
// final int tileSize = tileProcessor.getTileSize();
final int stilesX = (tilesX + superTileSize -1)/superTileSize;
final int stilesY = (tilesY + superTileSize -1)/superTileSize;
final TileSurface.TileNeibs tnSurface = st.tileSurface.new TileNeibs(stilesX, stilesY);
final Thread[] threads = ImageDtt.newThreadArray(st.tileProcessor.threadsMax);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
ConnectionCosts connectionCosts = new ConnectionCosts(
orthoWeight, // double orthoWeight,
diagonalWeight, // double diagonalWeight,
starPwr, // double starPwr, // Divide cost by number of connections to this power
starWeightPwr, // Use this power of tile weight when calculating connection cost
weightToDens, // Balance weighted density against density. 0.0 - density, 1.0 - weighted density
starValPwr, //double starValPwr, // Raise value of each tile before averaging
steps, // int steps,
planes, // TilePlanes.PlaneData [][] planes,
tnSurface, // TileSurface.TileNeibs tnSurface,
preferDisparity); // boolean preferDisparity)
int [] supertiles = new int[1];
for (int isTile = ai.getAndIncrement(); isTile < mod_supertiles.length; isTile = ai.getAndIncrement()) {
int nsTile = mod_supertiles[isTile];
if ((nsTile >= 0) && ( planes[nsTile] != null)) {
supertiles[0] = nsTile;
connectionCosts.initConnectionCosts(supertiles, debugLevel - 2);
double [][][] val_weights = connectionCosts.getValWeights();
for (int np = 0; np < planes[nsTile].length; np++){ // nu
if (planes[nsTile][np] != null) {
planes[nsTile][np].setStarValueWeight(val_weights[0][np]);
}
}
}
}
}
};
}
ImageDtt.startAndJoin(threads);
}
}
......@@ -1193,280 +1193,6 @@ public class SuperTiles{
}
public void processPlanes3(
final boolean [] selected, // or null
final double min_disp,
// final boolean invert_disp, // use 1/disparity
final int stMeasSel, // = 1; // Select measurements for supertiles : +1 - combo, +2 - quad +4 - hor +8 - vert
final double plDispNorm,
final int plMinPoints, // = 5; // Minimal number of points for plane detection
final double plTargetEigen, // = 0.1; // Remove outliers until main axis eigenvalue (possibly scaled by plDispNorm) gets below
final double plFractOutliers, // = 0.3; // Maximal fraction of outliers to remove
final int plMaxOutliers, // = 20; // Maximal number of outliers to remove
final boolean plPreferDisparity, // Always start with disparity-most axis (false - lowest eigenvalue)
final GeometryCorrection geometryCorrection,
final boolean correct_distortions,
final boolean smplMode, // = true; // Use sample mode (false - regular tile mode)
final int smplSide, // = 2; // Sample size (side of a square)
final int smplNum, // = 3; // Number after removing worst
final double smplRms, // = 0.1; // Maximal RMS of the remaining tiles in a sample
final int debugLevel,
final int dbg_X,
final int dbg_Y)
{
if (maxMinMax == null) getMaxMinMax(null, null);
final int tilesX = tileProcessor.getTilesX();
final int tilesY = tileProcessor.getTilesY();
final int superTileSize = tileProcessor.getSuperTileSize();
final int tileSize = tileProcessor.getTileSize();
final int stilesX = (tilesX + superTileSize -1)/superTileSize;
final int stilesY = (tilesY + superTileSize -1)/superTileSize;
final int nStiles = stilesX * stilesY;
final Thread[] threads = ImageDtt.newThreadArray(tileProcessor.threadsMax);
final AtomicInteger ai = new AtomicInteger(0);
this.planes = new TilePlanes.PlaneData[nStiles][];
final int debug_stile = (debugLevel > -1)? (dbg_Y * stilesX + dbg_X):-1;
// final boolean [][] dflt_select = {{}, null, null, null, null}; // use layer 0 (combo) only
final boolean [][] dflt_select = new boolean [measuredLayers.getNumLayers()][];
for (int i = 0; i < dflt_select.length; i++){
if ((stMeasSel & (1 << i)) !=0){
dflt_select[i] = new boolean[0];
} else {
dflt_select[i] = null;
}
}
// TODO: Remove when promoting PlaneData
final TilePlanes tpl = new TilePlanes(tileSize,superTileSize, geometryCorrection);
// final double [] disparity = cltPass3d.getDisparity();
// final double [] strength = cltPass3d.getStrength();
measuredLayers.setLayer (
0, // int num_layer,
cltPass3d.getDisparity(), // double [] disparity,
cltPass3d.getStrength(), // double [] strength,
null); // boolean [] selection) // may be null
if (debugLevel > -1) {
String [] titles = {"d0","s0","d1","s1","d2","s2","d3","s3","s","d"};
double [][] dbg_img = new double [titles.length][];
for (int i = 0; i < measuredLayers.getNumLayers(); i++){
dbg_img[2 * i] = measuredLayers.getDisparity(i);
dbg_img[2 * i + 1] = measuredLayers.getStrength(i);
}
dbg_img[8] = cltPass3d.getDisparity();
dbg_img[9] = cltPass3d.getStrength();
showDoubleFloatArrays sdfa_instance = new showDoubleFloatArrays();
sdfa_instance.showArrays(dbg_img, tileProcessor.getTilesX(), tileProcessor.getTilesY(), true, "measuredLayers",titles);
}
// if (maxMinMax == null)
getMaxMinMax(null, null);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nsTile = ai.getAndIncrement(); nsTile < nStiles; nsTile = ai.getAndIncrement()) {
if (nsTile == debug_stile){
System.out.println("processPlanes3(): nsTile="+nsTile);
}
int stileY = nsTile / stilesX;
int stileX = nsTile % stilesX;
int [] sTiles = {stileX, stileY};
planes[nsTile] = null;
// first make a plane from all tiles
TilePlanes.PlaneData pd0 = tpl.new PlaneData (
sTiles, // int [] sTileXY,
tileSize, // int tileSize,
geometryCorrection, // GeometryCorrection geometryCorrection,
correct_distortions,
measuredLayers, // MeasuredLayers measuredLayers,
plPreferDisparity); // boolean preferDisparity)
boolean [][] tile_sel = dflt_select.clone();
for (int i = 0; i < dflt_select.length; i++){
if (dflt_select[i] != null) tile_sel[i] = dflt_select[i].clone();
}
// int dl1 = (nsTile == debug_stile) ? 3 : 0;
int dl = (nsTile == debug_stile) ? 4 : 0;
double [][][] disp_strength = pd0.getPlaneFromMeas(
tile_sel, // boolean [][] tile_sel, // null - do not use, {} use all (will be modified)
null,
min_disp, // double disp_far, // minimal disparity to select (or NaN)
Double.NaN, // double disp_near, // maximal disparity to select (or NaN)
0.0, // plDispNorm, // double dispNorm, // Normalize disparities to the average if above
0.0, // double min_weight,
plMinPoints, // int min_tiles,
strength_floor, //
strength_pow, // double strength_pow,
smplMode,
smplSide,
smplNum,
smplRms,
dl); // int debugLevel)
if (disp_strength != null){ // there are some non-zero tiles, process them (all points, not clustered by disparity value)
boolean OK;
TilePlanes.PlaneData pd0_full = pd0.clone(); //
ArrayList<TilePlanes.PlaneData> st_planes = new ArrayList<TilePlanes.PlaneData>();
// now try to remove outliers
int max_outliers = (int) Math.round(pd0.getNumPoints() * plFractOutliers);
if (max_outliers > plMaxOutliers) max_outliers = plMaxOutliers;
double targetV = corrMaxEigen(
plTargetEigen,
plDispNorm,
pd0);
if (pd0.getValue() > targetV) {
OK = pd0.removeOutliers( // getPlaneFromMeas should already have run
disp_strength,
targetV, // double targetEigen, // target eigenvalue for primary axis (is disparity-dependent, so is non-constant)
max_outliers, // int maxRemoved, // maximal number of tiles to remove (not a constant)
dl); // int debugLevel)
if (!OK) continue;
if (dl > 0) {
if (pd0.getWeight() > 0.3) { // 1.0) {
System.out.println("Removed outliers["+nsTile+"]"+
", stileX="+stileX+
", stileY="+stileY+
", numPoints="+ pd0.getNumPoints()+
", swc = "+pd0.getWeight()+
", center=["+pd0.getZxy()[0]+","+pd0.getZxy()[1]+","+pd0.getZxy()[2]+"]"+
", eig_val = {"+pd0.getValues()[0]+","+pd0.getValues()[1]+","+pd0.getValues()[2]+"}"+
", eig_vect[0] = {"+pd0.getVector()[0]+","+pd0.getVector()[1]+","+pd0.getVector()[2]+"}");
}
}
} // nothing to do if already OK
if (dl > 0) {
System.out.println("Calculating World normal["+nsTile+"]");
}
double swc_common = pd0.getWeight();
double [] norm_xyz = pd0.getWorldXYZ(
correct_distortions,
dl);
if (dl > 0) {
System.out.println("World normal["+nsTile+"] = {"+
norm_xyz[0]+", "+norm_xyz[1]+", "+norm_xyz[2]+"}");
}
st_planes.add(pd0); // adding [0] - all supertile tiles, not clustered by disparity value
// now try for each of the disparity-separated clusters (only for multi-peak histograms)
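// maxMinMax[nsTile] alternates maxima and minima of the supertile disparity histogram: even rows
// (2*m) are maxima, odd rows are the minima between them; [i][0] is the disparity, [i][1] the weight.
// Each cluster m is selected between the two adjacent minima (NaN at the ends - no limit).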
double [][] mm = maxMinMax[nsTile];
if (mm == null){
// double [][][] dbg_min_max = maxMinMax;
// System.out.println("maxMinMax["+nsTile+"] == null");
}
if ((mm!= null) && (mm.length > 1)) { // multiple maximums - separate into multiple selections // null pointer
for (int m = 0; m < (mm.length +1)/2; m++){
double [] far_near = new double [2];
if (m == 0) {
far_near[0] = Double.NaN; // pd0 already filtered by min_disp
} else {
far_near[0] = mm[2 * m - 1][0];
}
if (m == (mm.length -1)/2) {
far_near[1] = Double.NaN; // pd0 already filtered by min_disp
} else {
far_near[1] = mm[2 * m + 1][0];
}
TilePlanes.PlaneData pd = pd0_full.clone();
OK = (pd.getPlaneFromMeas(
null, // tile_sel, // boolean [][] tile_sel, // null - do not use, {} use all (will be modified)
disp_strength,
far_near[0], // double disp_far, // minimal disparity to select (or NaN)
far_near[1], // double disp_near, // maximal disparity to select (or NaN)
0.0, // plDispNorm, // double dispNorm, // Normalize disparities to the average if above
0.0, // double min_weight,
plMinPoints, // int min_tiles,
strength_floor, //
strength_pow, // double strength_pow,
smplMode,
smplSide,
smplNum,
smplRms,
dl) != null); // int debugLevel)
if (OK) {
if (dl > 0) {
if (swc_common > 1.0) {
System.out.println("Processing subplane["+nsTile+"]["+m+"]"+
", stileX="+stileX+
", stileY="+stileY+
", numPoints="+ pd.getNumPoints()+
", sw = "+swc_common+
", swc = "+pd.getWeight()+
", center=["+pd.getZxy()[0]+","+pd.getZxy()[1]+","+pd.getZxy()[2]+"]"+
", eig_val = {"+pd.getValues()[0]+","+pd.getValues()[1]+","+pd.getValues()[2]+"}"+
", eig_vect[0] = {"+pd.getVector()[0]+","+pd.getVector()[1]+","+pd.getVector()[2]+"}");
}
}
// now try to remove outliers
max_outliers = (int) Math.round(pd.getNumPoints() * plFractOutliers);
if (max_outliers > plMaxOutliers) max_outliers = plMaxOutliers;
targetV = plTargetEigen;
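// Scale the target eigenvalue for near objects: when the plane center disparity exceeds plDispNorm,
// the allowed eigenvalue grows as the square of (plDispNorm + z0)/plDispNorm, relaxing the flatness
// requirement for high-disparity (near) supertiles.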
double z0 = pd.getZxy()[0];
if ((plDispNorm > 0.0) && (z0 > plDispNorm)) {
double dd = (plDispNorm + z0)/ plDispNorm; // > 1
targetV *= dd * dd; // > original
}
if (pd.getValues()[0] > targetV) {
OK = pd.removeOutliers( // getPlaneFromMeas should already have run
disp_strength,
targetV, // double targetEigen, // target eigenvalue for primary axis (is disparity-dependent, so is non-constant)
max_outliers, // int maxRemoved, // maximal number of tiles to remove (not a constant)
dl); // int debugLevel)
if (!OK) {
continue;
}
if (dl > 0) {
if (swc_common > 1.0) {
System.out.println("Removed outliers["+nsTile+"]["+m+"]"+
", stileX="+stileX+
", stileY="+stileY+
", numPoints="+ pd.getNumPoints()+
", sw = "+swc_common+
", swc = "+pd.getWeight()+
", center=["+pd.getZxy()[0]+","+pd.getZxy()[1]+","+pd.getZxy()[2]+"]"+
", eig_val = {"+pd.getValues()[0]+","+pd.getValues()[1]+","+pd.getValues()[2]+"}"+
", eig_vect[0] = {"+pd.getVector()[0]+","+pd.getVector()[1]+","+pd.getVector()[2]+"}");
}
}
}
norm_xyz = pd.getWorldXYZ(
correct_distortions);
st_planes.add(pd);
if (dl > 0) {
System.out.println("World normal["+nsTile+"]["+m+"] = {"+
norm_xyz[0]+", "+norm_xyz[1]+", "+norm_xyz[2]+"}");
}
}
}
}
if (st_planes.size() > 0){
planes[nsTile] = st_planes.toArray(new TilePlanes.PlaneData[0] );
if (dl >0){
System.out.println("processPlanes3(): nsTile="+nsTile);
}
}
}
}
}
};
}
ImageDtt.startAndJoin(threads);
}
public int [][] getTransMatrix(
boolean [][][] selections ) // for each plane should have the same non-null ml
{
......@@ -2463,8 +2189,26 @@ public class SuperTiles{
norm_xyz[0]+", "+norm_xyz[1]+", "+norm_xyz[2]+"}");
}
// calculate the world planes too
// if (debugLevel > -1){
pd.getWorldPlaneFromMeas(
plane_selections[nsTile][ps], // tile_sel, // boolean [][] tile_sel, // null - do not use, {} use all (will be modified)
disp_strength[nsTile],
Double.NaN, // double disp_far, // minimal disparity to select (or NaN)
Double.NaN, // double disp_near, // maximal disparity to select (or NaN)
plDispNorm, // 0.0, // plDispNorm, // double dispNorm, // Normalize disparities to the average if above
0.0, // double min_weight,
plMinPoints, // int min_tiles,
strength_floor, //
strength_pow, // double strength_pow,
// update !
smplMode,
smplSide,
smplNum,
smplRms,
dl);
// }
}
}
if (st_planes.size() > 0){
// sort planes by increasing disparity (tile center or plane center ? ) Using plane center
......@@ -2605,7 +2349,7 @@ public class SuperTiles{
smplNum, // final int smplNum, // = 3; // Number after removing worst
smplRms, // final double smplRms, // = 0.1; // Maximal RMS of the remaining tiles in a sample
debugLevel, // final int debugLevel,
debugLevel+1, // final int debugLevel,
dbg_X, // final int dbg_X,
dbg_Y); // final int dbg_Y)
this.planes = new_planes; // save as "measured" (as opposed to "smoothed" by neighbors) planes
......@@ -2613,488 +2357,6 @@ public class SuperTiles{
}
public void processPlanes4(
// final boolean [] selected, // or null
// final double min_disp,
final int stMeasSel, // = 1; // Select measurements for supertiles : +1 - combo, +2 - quad +4 - hor +8 - vert
final double plDispNorm,
final int plMinPoints, // = 5; // Minimal number of points for plane detection
final double plTargetEigen, // = 0.1; // Remove outliers until main axis eigenvalue (possibly scaled by plDispNorm) gets below
final double plFractOutliers, // = 0.3; // Maximal fraction of outliers to remove
final int plMaxOutliers, // = 20; // Maximal number of outliers to remove
final boolean plPreferDisparity, // Always start with disparity-most axis (false - lowest eigenvalue)
final GeometryCorrection geometryCorrection,
final boolean correct_distortions,
final boolean smplMode, // = true; // Use sample mode (false - regular tile mode)
final int smplSide, // = 2; // Sample size (side of a square)
final int smplNum, // = 3; // Number after removing worst
final double smplRms, // = 0.1; // Maximal RMS of the remaining tiles in a sample
final double bin_blur_hor, // Blur disparity histograms for horizontal clusters by this sigma (in bins)
final double bin_blur_vert, // Blur disparity histograms for constant disparity clusters by this sigma (in bins)
final double smallDiff, // = 0.4; // Consider merging initial planes if disparity difference below
final double highMix, //stHighMix = 0.4; // Consider merging initial planes if jumps between ratio above
// final double [] vertical_xyz, // real world up unit vector in camera CS (x - right, y - up, z - to camera};
final int debugLevel,
final int dbg_X,
final int dbg_Y)
{
if (maxMinMax == null) getMaxMinMax(null, null); // so far - no planes, no selection
final int tilesX = tileProcessor.getTilesX();
final int tilesY = tileProcessor.getTilesY();
final int superTileSize = tileProcessor.getSuperTileSize();
final int tileSize = tileProcessor.getTileSize();
final int stilesX = (tilesX + superTileSize -1)/superTileSize;
final int stilesY = (tilesY + superTileSize -1)/superTileSize;
final int nStiles = stilesX * stilesY;
final Thread[] threads = ImageDtt.newThreadArray(tileProcessor.threadsMax);
final AtomicInteger ai = new AtomicInteger(0);
this.planes = new TilePlanes.PlaneData[nStiles][];
final int debug_stile = (debugLevel > -1)? (dbg_Y * stilesX + dbg_X):-1;
// final boolean [][] dflt_select = {{}, null, null, null, null}; // use layer 0 (combo) only
/*
final boolean [][] dflt_select = new boolean [measuredLayers.getNumLayers()][];
for (int i = 0; i < dflt_select.length; i++){
if ((stMeasSel & (1 << i)) !=0){
dflt_select[i] = new boolean[0];
} else {
dflt_select[i] = null;
}
}
*/
// TODO: Remove when promoting PlaneData
final TilePlanes tpl = new TilePlanes(tileSize,superTileSize, geometryCorrection);
// final double [] disparity = cltPass3d.getDisparity();
// final double [] strength = cltPass3d.getStrength();
measuredLayers.setLayer (
0, // int num_layer,
cltPass3d.getDisparity(), // double [] disparity,
cltPass3d.getStrength(), // double [] strength,
null); // boolean [] selection) // may be null
if (debugLevel > -1) {
String [] titles = {"d0","s0","d1","s1","d2","s2","d3","s3","s","d"};
double [][] dbg_img = new double [titles.length][];
for (int i = 0; i < measuredLayers.getNumLayers(); i++){
dbg_img[2 * i] = measuredLayers.getDisparity(i);
dbg_img[2 * i + 1] = measuredLayers.getStrength(i);
}
dbg_img[8] = cltPass3d.getDisparity();
dbg_img[9] = cltPass3d.getStrength();
showDoubleFloatArrays sdfa_instance = new showDoubleFloatArrays();
sdfa_instance.showArrays(dbg_img, tileProcessor.getTilesX(), tileProcessor.getTilesY(), true, "measuredLayers",titles);
}
// getMaxMinMax(
// null, // final double [][][][] disparity_strength, // pre-calculated disparity/strength [per super-tile][per-measurement layer][2][tiles] or null
// null); // final boolean [][] tile_sel // null or per-measurement layer, per-tile selection. For each layer null - do not use, {} - use all
double [] world_hor = {0.0, 1.0, 0.0};
final double [][][][] plane_disp_strength = getPlaneDispStrengths(
world_hor, // final double [] world_plane_norm, // real world normal vector to a suggested plane family (0,1,0) for horizontal planes
stMeasSel, //final int stMeasSel, // = 1; // Select measurements for supertiles : +1 - combo, +2 - quad +4 - hor +8 - vert
plPreferDisparity, // final boolean plPreferDisparity, // Always start with disparity-most axis (false - lowest eigenvalue)
geometryCorrection, // final GeometryCorrection geometryCorrection,
correct_distortions, // final boolean correct_distortions,
smplMode, // final boolean smplMode, // = true; // Use sample mode (false - regular tile mode)
smplSide, //final int smplSide, // = 2; // Sample size (side of a square)
smplNum, //final int smplNum, // = 3; // Number after removing worst
smplRms, //final double smplRms, // = 0.1; // Maximal RMS of the remaining tiles in a sample
debugLevel,
dbg_X,
dbg_Y);
String [] dbg_hist_titles = {"all","hor","mm_all","mm_hor"};
double [][] dbg_hist = new double [dbg_hist_titles.length][];
System.out.println("Calculating histograms for hoirizontal planes");
// resetDisparityHistograms();
setBlurSigma(bin_blur_hor);
final double [][][] mmm_hor = getMaxMinMax(
plane_disp_strength, // final double [][][][] disparity_strength, // pre-calculated disparity/strength [per super-tile][per-measurement layer][2][tiles] or null
null); // final boolean [][] tile_sel // null or per-measurement layer, per-tile selection. For each layer null - do not use, {} - use all
if (debugLevel > -1) {
dbg_hist[1] = showDisparityHistogram().clone();
dbg_hist[3] = showMaxMinMax().clone();
}
// resetDisparityHistograms();
setBlurSigma(bin_blur_vert);
final double [][][] mmm_all = getMaxMinMax(
null, // final double [][][][] disparity_strength, // pre-calculated disparity/strength [per super-tile][per-measurement layer][2][tiles] or null
null); // final boolean [][] tile_sel // null or per-measurement layer, per-tile selection. For each layer null - do not use, {} - use all
if (debugLevel > -1) {
dbg_hist[0] = showDisparityHistogram().clone();
dbg_hist[2] = showMaxMinMax().clone();
}
if (debugLevel > -1) {
int hist_width0 = showDisparityHistogramWidth();
int hist_height0 = dbg_hist[0].length/hist_width0;
showDoubleFloatArrays sdfa_instance = new showDoubleFloatArrays(); // just for debugging?
sdfa_instance.showArrays(dbg_hist, hist_width0, hist_height0, true, "all_hor_histograms",dbg_hist_titles);
}
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nsTile = ai.getAndIncrement(); nsTile < nStiles; nsTile = ai.getAndIncrement()) {
if (nsTile == debug_stile){
System.out.println("processPlanes4(): nsTile="+nsTile);
}
int stileY = nsTile / stilesX;
int stileX = nsTile % stilesX;
int [] sTiles = {stileX, stileY};
planes[nsTile] = null;
// first make a plane from all tiles
TilePlanes.PlaneData pd0 = tpl.new PlaneData (
sTiles, // int [] sTileXY,
tileSize, // int tileSize,
geometryCorrection, // GeometryCorrection geometryCorrection,
correct_distortions,
measuredLayers, // MeasuredLayers measuredLayers,
plPreferDisparity); // boolean preferDisparity)
/*
boolean [][] tile_sel = dflt_select.clone();
for (int i = 0; i < dflt_select.length; i++){
if (dflt_select[i] != null) tile_sel[i] = dflt_select[i].clone();
}
*/
// int dl1 = (nsTile == debug_stile) ? 3 : 0;
int dl = (nsTile == debug_stile) ? 3 : 0;
// plane_disp_strength
if (dl > 2) {
String [] dbg_titles = showSupertileSeparationTitles(plane_disp_strength[nsTile], null);
double [][] dbg_img = showSupertileSeparation(false,plane_disp_strength[nsTile], null);
showDoubleFloatArrays sdfa_instance = new showDoubleFloatArrays();
sdfa_instance.showArrays(dbg_img, 2 * superTileSize, 2* superTileSize, true, "HOR_SEP_DISP"+nsTile,dbg_titles);
dbg_img = showSupertileSeparation(true, plane_disp_strength[nsTile], null);
sdfa_instance.showArrays(dbg_img, 2 * superTileSize, 2* superTileSize, true, "HOR_SEP_WORLD"+nsTile,dbg_titles);
}
ArrayList<TilePlanes.PlaneData> st_planes = new ArrayList<TilePlanes.PlaneData>();
double[][][] disp_strength = new double[measuredLayers.getNumLayers()][][];
for (int ml = 0; ml < disp_strength.length; ml++) if ((stMeasSel & ( 1 << ml)) != 0){
if (smplMode) {
disp_strength[ml] = measuredLayers.getDisparityStrength(
ml, // int num_layer,
stileX, // int stX,
stileY, // int stY,
null, // boolean [] sel_in,
strength_floor, // double strength_floor,
strength_pow, // double strength_pow,
smplSide, // int smplSide, // = 2; // Sample size (side of a square)
smplNum, //int smplNum, // = 3; // Number after removing worst (should be >1)
smplRms, //double smplRms, // = 0.1; // Maximal RMS of the remaining tiles in a sample
true); // boolean null_if_none);
} else {
disp_strength[ml] = measuredLayers.getDisparityStrength(
ml, // int num_layer,
stileX, // int stX,
stileY, // int stY,
null, // boolean [] sel_in,
strength_floor, // double strength_floor,
strength_pow, // double strength_pow,
true); // boolean null_if_none);
}
}
boolean OK;
double [][] mm = maxMinMax[nsTile];
if (mm == null){
// double [][][] dbg_min_max = maxMinMax;
// System.out.println("maxMinMax["+nsTile+"] == null");
continue;
}
double [][] max_only = new double [(mm.length + 1)/2][2];
for (int i = 0; i < max_only.length; i++){
max_only[i] = mm[2 * i];
}
boolean [][][] plane_sels = null;
int num_ml = disp_strength.length;
// int num_p = max_only.length;
int num_tiles = 4 * superTileSize * superTileSize;
int [] num_sel;
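// Iteratively refine the per-plane tile selections: assign each measured tile to the nearest
// histogram maximum, recalculate each candidate plane's mean disparity and weight, then (after the
// first pass) either remove the plane with the fewest tiles if it has fewer than plMinPoints, or
// merge two adjacent planes that are close in disparity (< smallDiff) and strongly mixed
// (rel_trans > highMix), restarting the assignment whenever a plane is removed or merged.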
for (int iter = 0; iter < 2; iter ++){
int num_p = max_only.length;
plane_sels = new boolean[num_p][num_ml][];
num_sel = new int [num_p];
for (int np = 0; np < num_p; np++) {
for (int ml = 0; ml < num_ml; ml++) if (disp_strength[ml] != null) {
plane_sels[np][ml] = new boolean[num_tiles];
}
}
// compare closest to be able to use tilted planes later
for (int ml = 0; ml < num_ml; ml++) if (disp_strength[ml] != null) {
for (int indx = 0; indx < num_tiles; indx++) if (disp_strength[ml][1][indx] > 0.0){
int best_plane = -1;
double best_d2 = Double.NaN;
for (int np = 0; np < num_p; np++) {
double d2 = max_only[np][0] - disp_strength[ml][0][indx];
// add disp_norm correction here?
d2 *= d2;
if (!(d2 >= best_d2)){
best_d2 = d2;
best_plane = np;
}
}
if (best_plane >= 0){ // compare to max diff here too
plane_sels[best_plane][ml][indx] = true; // so far exclusive
}
}
}
// recalculate average disparities for each plane and show number of tiles in each in debug mode
for (int np = 0; np < num_p; np++) {
double sd = 0.0, sw = 0.0;
int nt = 0;
for (int ml = 0; ml < num_ml; ml++) if (disp_strength[ml] != null) {
for (int indx = 0; indx < num_tiles; indx++) if (plane_sels[np][ml][indx]){
double w = disp_strength[ml][1][indx];
sd += w * disp_strength[ml][0][indx];
sw += w;
num_sel[np]++;
}
}
if (sw > 0) {
sd /= sw;
}
if (dl > 0) {
System.out.println("plane num_sel["+np+"] = "+num_sel[np]+" disp "+max_only[np][0]+"->"+sd+
", weight "+max_only[np][1]+"->"+sw);
}
max_only[np][0] = sd;
max_only[np][1] = sw;
}
// calculate transitions matrix (to find candidates for merge)
int [][] trans_mat = getTransMatrix(plane_sels);
double [][] rel_trans = getTransRel(trans_mat);
if (dl > 0) {
System.out.println("trans_mat = ");
for (int i = 0; i < trans_mat.length; i++){
System.out.print(i+": ");
for (int j = 0; j < trans_mat[i].length; j++){
System.out.print(trans_mat[i][j]+" ");
}
System.out.println();
}
System.out.println("rel_trans = ");
for (int i = 0; i < rel_trans.length; i++){
System.out.print(i+": ");
for (int j = 0; j < rel_trans[i].length; j++){
System.out.print(rel_trans[i][j]+" ");
}
System.out.println();
}
}
if ((iter > 0 ) && (num_p > 1)){ // remove or join bad planes
int windx = 0;
int remove_indx = -1;
for (int i = 1; i < num_p; i++) if (num_sel[i] < num_sel[windx]) windx = i;
if (num_sel[windx] < plMinPoints) {
if (debugLevel > 0){
System.out.println ("processPlanes(): stileX = "+stileX+" stileY="+stileY+
": removing plane "+windx+" with "+num_sel[windx]+" tiles ( <"+plMinPoints+")");
}
remove_indx = windx;
}
if (remove_indx < 0) {
// find candidates for merge
windx = -1;
for (int i = 0; i < (num_p - 1); i++) {
if (((max_only[i+1][0] - max_only[i][0]) < smallDiff) && // close enough to consider merging
(rel_trans[i][i+1] > highMix)) {
if ((windx < 0) || (rel_trans[i][i+1] > rel_trans[windx][windx+1])) windx = i;
}
}
if (windx >=0 ) {
if (debugLevel > 0){
System.out.println ("processPlanes(): stileX = "+stileX+" stileY="+stileY+
": merging plane "+windx+" with " + (windx + 1)+": "+
num_sel[windx] + " and "+num_sel[windx+1]+" tiles, "+
" rel_trans="+rel_trans[windx][windx + 1]+ " ( > " + highMix+"),"+
" diff="+ (max_only[windx + 1][0]- max_only[windx][0]) + " ( < " + smallDiff+" ),"+
" disp1 = "+max_only[windx][0]+" disp2 = "+max_only[windx + 1][0]);
}
double sum_w = max_only[windx][1] + max_only[windx + 1][1];
max_only[windx+1][0] = (max_only[windx][0]*max_only[windx][1] + max_only[windx+1][0]*max_only[windx+1][1]) / sum_w;
max_only[windx+1][1] = sum_w;
remove_indx = windx;
}
}
if (remove_indx >= 0){
double [][] max_only_copy = max_only.clone();
for (int i = 0; i < max_only.length; i++) max_only_copy[i] = max_only[i];
max_only = new double [max_only.length - 1][];
int indx = 0;
for (int i = 0; i < max_only_copy.length; i++) if (i != remove_indx) max_only[indx++] =max_only_copy[i];
iter = 0;
continue; // restart from 0
}
// Show other candidates for merge
if (debugLevel > 0){
double max_sep = 0.2;
if (iter > 0) {
for (int i = 0; i < (num_p-1); i++){
if (rel_trans[i][i+1] > max_sep) {
System.out.println("processPlanes4() stileX = "+stileX+" stileY="+stileY+" lowplane = "+i+
" num_sel1 = "+num_sel[i] + " num_sel2 = "+num_sel[i+1] +
" rel_trans="+rel_trans[i][i+1]+
" diff="+ (max_only[i+1][0]- max_only[i][0]) +
" disp1 = "+max_only[i][0]+" disp2 = "+max_only[i+1][0]);
}
}
}
}
}
}
if (dl > 2) {
String [] dbg_titles = showSupertileSeparationTitles( disp_strength, plane_sels);
double [][] dbg_img = showSupertileSeparation(false,disp_strength, plane_sels);
showDoubleFloatArrays sdfa_instance = new showDoubleFloatArrays();
sdfa_instance.showArrays(dbg_img, 2 * superTileSize, 2* superTileSize, true, "initial_separation_disp"+nsTile,dbg_titles);
dbg_img = showSupertileSeparation(true, disp_strength, plane_sels);
sdfa_instance.showArrays(dbg_img, 2 * superTileSize, 2* superTileSize, true, "initial_separation_world"+nsTile,dbg_titles);
}
if (dl > 2) {
double [] world_hor = {0.0, 1.0, 0.0};
double sd = 0.0, sw = 0.0;
for (int i = 0; i < max_only.length; i++){
sd += max_only[i][0] * max_only[i][1];
sw += max_only[i][1];
}
if (sw > 0) {
System.out.println("Horizontally tilted disparity for stileX = "+stileX+" stileY="+stileY+", average disparity "+(sd/sw));
double [][][] hor_disp_strength = pd0.getDisparityToPlane(
world_hor, // double [] world_normal_xyz,
sd / sw, // average disparity // double disp_center,
null, // boolean [][] tile_sel, // null - do not use, {} use all (will be modified)
disp_strength, // double [][][] disp_str, // calculate just once if null
1); // int debugLevel);
String [] dbg_titles = showSupertileSeparationTitles( hor_disp_strength, plane_sels);
double [][] dbg_img = showSupertileSeparation(false, hor_disp_strength, plane_sels);
showDoubleFloatArrays sdfa_instance = new showDoubleFloatArrays();
sdfa_instance.showArrays(dbg_img, 2 * superTileSize, 2* superTileSize, true, "hor_separation_disp"+nsTile,dbg_titles);
dbg_img = showSupertileSeparation(true, hor_disp_strength, plane_sels);
sdfa_instance.showArrays(dbg_img, 2 * superTileSize, 2* superTileSize, true, "hor_separation_world"+nsTile,dbg_titles);
}
}
for (int m = 0; m < max_only.length; m++) {
// TilePlanes.PlaneData pd = pd0_full.clone();
TilePlanes.PlaneData pd = pd0.clone();
OK = (pd.getPlaneFromMeas(
plane_sels[m], // tile_sel, // boolean [][] tile_sel, // null - do not use, {} use all (will be modified)
disp_strength,
Double.NaN, // double disp_far, // minimal disparity to select (or NaN)
Double.NaN, // double disp_near, // maximal disparity to select (or NaN)
0.0, // plDispNorm, // double dispNorm, // Normalize disparities to the average if above
0.0, // double min_weight,
plMinPoints, // int min_tiles,
strength_floor, //
strength_pow, // double strength_pow,
// update !
smplMode,
smplSide,
smplNum,
smplRms,
dl) != null); // int debugLevel)
if (OK) {
if (dl > 0) {
if (pd.getWeight() > 1.0) {
System.out.println("Processing subplane["+nsTile+"]["+m+"]"+
", stileX="+stileX+
", stileY="+stileY+
", numPoints="+ pd.getNumPoints()+
", swc = "+pd.getWeight()+
", center=["+pd.getZxy()[0]+","+pd.getZxy()[1]+","+pd.getZxy()[2]+"]"+
", eig_val = {"+pd.getValues()[0]+","+pd.getValues()[1]+","+pd.getValues()[2]+"}"+
", eig_vect[0] = {"+pd.getVector()[0]+","+pd.getVector()[1]+","+pd.getVector()[2]+"}");
}
}
// now try to remove outliers
int max_outliers = (int) Math.round(pd.getNumPoints() * plFractOutliers);
if (max_outliers > plMaxOutliers) max_outliers = plMaxOutliers;
double targetV = plTargetEigen;
double z0 = pd.getZxy()[0];
if ((plDispNorm > 0.0) && (z0 > plDispNorm)) {
double dd = (plDispNorm + z0)/ plDispNorm; // > 1
targetV *= dd * dd; // > original
}
if (pd.getValues()[0] > targetV) {
OK = pd.removeOutliers( // getPlaneFromMeas should already have run
disp_strength,
targetV, // double targetEigen, // target eigenvalue for primary axis (is disparity-dependent, so is non-constant)
max_outliers, // int maxRemoved, // maximal number of tiles to remove (not a constant)
dl); // int debugLevel)
if (!OK) {
continue;
}
if (dl > 0) {
if (pd.getWeight() > 1.0) {
System.out.println("Removed outliers["+nsTile+"]["+m+"]"+
", stileX="+stileX+
", stileY="+stileY+
", numPoints="+ pd.getNumPoints()+
", swc = "+pd.getWeight()+
", center=["+pd.getZxy()[0]+","+pd.getZxy()[1]+","+pd.getZxy()[2]+"]"+
", eig_val = {"+pd.getValues()[0]+","+pd.getValues()[1]+","+pd.getValues()[2]+"}"+
", eig_vect[0] = {"+pd.getVector()[0]+","+pd.getVector()[1]+","+pd.getVector()[2]+"}");
}
}
}
double [] norm_xyz = pd.getWorldXYZ(
correct_distortions);
st_planes.add(pd);
if (dl > 0) {
System.out.println("World normal["+nsTile+"]["+m+"] = {"+
norm_xyz[0]+", "+norm_xyz[1]+", "+norm_xyz[2]+"}");
}
}
}
if (st_planes.size() > 0){
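// Insert a temporary copy at index 0 so the cluster planes start at index 1 (slot 0 is apparently
// reserved for a whole-supertile plane, as in processPlanes3()), then null it out after converting
// the list to an array.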
st_planes.add(0, st_planes.get(0)); // insert dummy at pos 0;
planes[nsTile] = st_planes.toArray(new TilePlanes.PlaneData[0] );
planes[nsTile][0] = null; // remove dummy
if (dl >0){
System.out.println("processPlanes4(): nsTile="+nsTile);
}
}
// }
}
}
};
}
ImageDtt.startAndJoin(threads);
}
public int [] getShowPlanesWidthHeight()
{
......@@ -4072,6 +3334,7 @@ public class SuperTiles{
public int [] resolveStarConflicts(
int [][][] conflicts,
Conflicts conflict_stats, // to be updated after applying resolution
LinkPlanes lp,
int starSteps, // How far to look around when calculating connection cost
double orthoWeight,
double diagonalWeight,
......@@ -4104,6 +3367,7 @@ public class SuperTiles{
nsTile,
conflicts[nsTile][nConfl][0], // int nl1,
conflicts[nsTile][nConfl][1], // int nl2,
lp, // LinkPlanes lp,
tnSurface,
conflicts,
conflict_stats, // to be updated after applying resolution
......@@ -4428,6 +3692,7 @@ public class SuperTiles{
int nsTile,
int nl1,
int nl2,
LinkPlanes lp,
TileSurface.TileNeibs tnSurface,
int [][][] conflicts,
Conflicts conflict_stats, // to be updated after applying resolution
......@@ -4739,7 +4004,7 @@ public class SuperTiles{
}
}
// recalculate starValueWeights for and around the tiles (nsTiles) with modified neighbors (no outside connections changed)
updateStarValueStrength(
lp.updateStarValueStrength(
nsTiles, // final int [] mod_supertiles,
orthoWeight, // final double orthoWeight,
diagonalWeight, // final double diagonalWeight,
......@@ -4759,7 +4024,7 @@ public class SuperTiles{
return true;
}
public void calcStarValueStrength(
public void calcStarValueStrengthOld(
final double orthoWeight,
final double diagonalWeight,
final double starPwr, // Divide cost by number of connections to this power
......@@ -4814,7 +4079,7 @@ public class SuperTiles{
ImageDtt.startAndJoin(threads);
}
public void updateStarValueStrength(
public void updateStarValueStrengthOld(
final int [] mod_supertiles,
final double orthoWeight,
final double diagonalWeight,
......@@ -5120,6 +4385,7 @@ public class SuperTiles{
}
public void resolveConflicts(
LinkPlanes lp,
double maxEigen, // maximal eigenvalue of planes to consider
boolean conflDualTri, // Resolve dual triangles conflict (odoodo)
boolean conflMulti, // Resolve multiple odo triangles conflicts
......@@ -5141,7 +4407,8 @@ public class SuperTiles{
int dbg_Y)
{
calcStarValueStrength(
lp.calcStarValueStrength(
true, // boolean set_start_planes,
orthoWeight, // final double orthoWeight,
diagonalWeight, // final double diagonalWeight,
starPwr, // final double starPwr, // Divide cost by number of connections to this power
......@@ -5225,6 +4492,7 @@ public class SuperTiles{
conflict_star_results = resolveStarConflicts(
conflicts0, // int [][][] conflicts,
conflicts0_stats,
lp, // LinkPlanes lp,
starSteps, // How far to look around when calculating connection cost
orthoWeight, // double orthoWeight,
diagonalWeight, // double diagonalWeight,
......@@ -6777,6 +6045,26 @@ public class SuperTiles{
System.out.println("=== BUG in applyMergePlanes(): failed to getPlaneFromMeas() for merged planes");
break;
}
this_pd.getWorldPlaneFromMeas( // re-calculate world-based planes too
null, // tile_sel, // boolean [][] tile_sel, // null - do not use, {} use all (will be modified)
disp_strength,
disp_far, // double disp_far, // minimal disparity to select (or NaN)
disp_near, // double disp_near, // maximal disparity to select (or NaN)
dispNorm, // double dispNorm, // Normalize disparities to the average if above
min_weight, // double min_weight,
min_tiles, // int min_tiles,
strength_floor, // double strength_floor,
strength_pow, // double strength_pow,
//OK?
smplMode,
smplSide,
smplNum,
smplRms,
dl); // int debugLevel)
// remove outliers //removeOutliers
// now try to remove outliers
int max_outliers = (int) Math.round(this_pd.getNumPoints() * fractOutliers);
......@@ -7116,7 +6404,24 @@ public class SuperTiles{
max_outliers, // int maxRemoved, // maximal number of tiles to remove (not a constant)
dl); // int debugLevel)
if (!OK) break;
}
}
bpd[np][npip].getWorldPlaneFromMeas(
null, // boolean [][] tile_sel, // null - do not use, {} use all (will be modified)
disp_strength,
disp_far, // double disp_far, // minimal disparity to select (or NaN)
disp_near, // double disp_near, // maximal disparity to select (or NaN)
dispNorm, // double dispNorm, // Normalize disparities to the average if above
min_weight, // double min_weight,
min_tiles, // int min_tiles,
strength_floor, // double strength_floor,
strength_pow, // double strength_pow,
// OK?
smplMode,
smplSide,
smplNum,
smplRms,
dl); // int debugLevel)
}
if (!OK) {
brokenPd[nsTile][np] = null;
......
......@@ -86,11 +86,24 @@ public class TilePlanes {
int smplNum = 3; // Number after removing worst
double smplRms = 0.1; // Maximal RMS of the remaining tiles in a sample
double [] starValueWeight = null;
double [] starValueWeight = null;
PlaneData starPlane = null;
PlaneData nonexclusiveStar = null;
PlaneData nonexclusiveStarEq = null;
double conn_density = Double.NaN; //
boolean preferDisparity = false;
// alternative "plane" calculations in the world coordinates
double [] wxyz = null; // [3] - plane center point when calculated in world coordinates (x, y , z)
double [][] wvectors = null; // [3][3] - eigenvectors calculated in the real world
double [] wvalues = null; // [3] - eigenvalues calculated in the real world
double [][] merged_weig_val = null; // for each of the directions (N, NE, .. NW) quality match for each layer
double [][] merged_weig_eq = null; // for each of the directions (N, NE, .. NW) quality match for each layer - ignoring weights
public PlaneData clone(){
PlaneData pd = new PlaneData(
this.sTileXY,
......@@ -103,21 +116,20 @@ public class TilePlanes {
pd.weight = this.weight;
if (this.plane_sel != null) pd.plane_sel = this.plane_sel.clone();
if (this.values != null) pd.values = this.values.clone();
if (this.zxy != null) pd.zxy = this.zxy.clone();
// World calculations should be invalidated during cloning?
/*
if (this.center_xyz != null) pd.center_xyz = this.center_xyz.clone();
if (this.world_xyz != null) pd.world_xyz = this.world_xyz.clone();
if (this.world_v1 != null) pd.world_v1 = this.world_v1.clone();
if (this.world_v2 != null) pd.world_v2 = this.world_v2.clone();
*/
if (this.vectors != null) {
pd.vectors = new double[3][];
pd.vectors[0] = this.vectors[0].clone();
pd.vectors[1] = this.vectors[1].clone();
pd.vectors[2] = this.vectors[2].clone();
}
// Adding cloning of the calculated center_xyz and world_xyz (normal). Check that it did not break anything
if (this.center_xyz != null) pd.center_xyz = this.center_xyz.clone();
if (this.world_xyz != null) pd.world_xyz = this.world_xyz.clone();
if (this.world_v1 != null) pd.world_v1 = this.world_v1.clone();
if (this.world_v2 != null) pd.world_v2 = this.world_v2.clone();
if (this.measuredLayers != null) pd.measuredLayers = this.measuredLayers;
pd.setMeasSelection(this.measuredSelection);
......@@ -143,23 +155,137 @@ public class TilePlanes {
if (starValueWeight != null){
pd.starValueWeight = starValueWeight.clone();
}
if (this.starPlane != null) pd.starPlane = this.starPlane;
if (this.nonexclusiveStar != null) pd.nonexclusiveStar = this.nonexclusiveStar;
if (this.nonexclusiveStarEq != null) pd.nonexclusiveStarEq = this.nonexclusiveStarEq;
pd.conn_density = this.conn_density;
//
if (this.wxyz != null) pd.wxyz = this.wxyz.clone();
if (this.wvalues != null) pd.wvalues = this.wvalues.clone();
if (this.wvectors != null) {
pd.wvectors = new double[3][];
pd.wvectors[0] = this.wvectors[0].clone();
pd.wvectors[1] = this.wvectors[1].clone();
pd.wvectors[2] = this.wvectors[2].clone();
}
return pd;
}
// public void setConnectionDensity(double density){
// conn_density = density;
// }
public String getNeibString()
{
if (neib_best == null) {
return "[ undefined ]";
}
String s = "[";
for (int dir = 0; dir < 8; dir++){
s += (neib_best[dir]>=0) ? neib_best[dir]:"x";
if (dir < 7) s += ", ";
}
s+= "] ";
return s;
}
public boolean isHorizontal(){
if (wvectors != null){
return (Math.abs(wvectors[0][1]) > 0.99);
}
return false;
}
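// Note (illustrative, assuming the world frame is camera-centered): wvectors[0] is the
// world-space plane normal, so the test above treats a plane as horizontal when its normal
// is within about acos(0.99) ~ 8 degrees of the vertical (Y) axis, and it returns false
// whenever the world-space fit (wvectors) has not been computed yet.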
public String toString()
{
String s = " ";
s += getNeibString();
if (isHorizontal()){
s+= "HORIZONTAL ";
}
s += String.format( "np=%3d weight= %8.5f", num_points, weight);
if (starValueWeight != null) s += String.format(" star=[%8.5f, %8.5f]", starValueWeight[0], starValueWeight[1]);
else s += " star= null";
s += String.format(" dens=%8.5f", conn_density);
if (zxy != null) s += String.format("\nzxy = [%8.3f, %8.3f, %8.3f] (pix)",zxy[0],zxy[1],zxy[2]);
else s += "\nzxy = null";
if (values != null) s += String.format(", values = [%8.3f, %8.3f, %8.3f] pix^2",values[0],values[1],values[2]);
else s += " values = null";
if (vectors != null) s += String.format("\nvectors = [%8.5f, %8.5f, %8.5f], [%8.5f, %8.5f, %8.5f], [%8.5f, %8.5f, %8.5f]",
vectors[0][0],vectors[0][1],vectors[0][2], vectors[1][0],vectors[1][1],vectors[1][2], vectors[2][0],vectors[2][1],vectors[2][2]);
if (center_xyz != null) s += String.format("\ncenter = [%8.2f, %8.2f, %8.2f]",center_xyz[0],center_xyz[1],center_xyz[2]);
else s += "\ncenter = null";
if (world_xyz != null) s += String.format(" normal = [%8.2f, %8.2f, %8.2f] (m)",world_xyz[0],world_xyz[1],world_xyz[2]);
else s += " normal = null";
if (wxyz != null) s += String.format("\nwxyz = [%8.2f, %8.2f, %8.2f] (m)",wxyz[0],wxyz[1],wxyz[2]);
else s += "\nwxyz = null";
if (wvalues != null) s += String.format(" wvals = [%8.2f, %8.2f, %8.2f] (m^2)",wvalues[0],wvalues[1],wvalues[2]);
else s += " wvals = null";
if (wvectors != null) s += String.format("\nwvect = [%8.5f, %8.5f, %8.5f], [%8.5f, %8.5f, %8.5f], [%8.5f, %8.5f, %8.5f]",
wvectors[0][0],wvectors[0][1],wvectors[0][2], wvectors[1][0],wvectors[1][1],wvectors[1][2], wvectors[2][0],wvectors[2][1],wvectors[2][2]);
if (nonexclusiveStar != null){
s+= "\nweighted: ";
s+= nonexclusiveStar.getNeibString();
if (nonexclusiveStar.isHorizontal()){
s+= "HORIZONTAL ";
}
s += String.format( "np=%3d weight= %8.5f", nonexclusiveStar.num_points, nonexclusiveStar.weight);
if (nonexclusiveStar.center_xyz != null) s += String.format("\n--center =[%8.2f, %8.2f, %8.2f]",
nonexclusiveStar.center_xyz[0],nonexclusiveStar.center_xyz[1],nonexclusiveStar.center_xyz[2]);
else s += "\n--ncenter = null";
if (nonexclusiveStar.world_xyz != null) s += String.format(" normal = [%8.2f, %8.2f, %8.2f] (m)",
nonexclusiveStar.world_xyz[0],nonexclusiveStar.world_xyz[1],nonexclusiveStar.world_xyz[2]);
else s += " normal = null";
}
if (nonexclusiveStarEq != null){
s+= "\nequalized:";
s+= nonexclusiveStarEq.getNeibString();
if (nonexclusiveStarEq.isHorizontal()){
s+= "HORIZONTAL ";
}
s += String.format( "np=%3d weight= %8.5f", nonexclusiveStarEq.num_points, nonexclusiveStarEq.weight);
if (nonexclusiveStarEq.center_xyz != null) s += String.format("\n--center =[%8.2f, %8.2f, %8.2f]",
nonexclusiveStarEq.center_xyz[0],nonexclusiveStarEq.center_xyz[1],nonexclusiveStarEq.center_xyz[2]);
else s += "\n--ncenter = null";
if (nonexclusiveStarEq.world_xyz != null) s += String.format(" normal = [%8.2f, %8.2f, %8.2f] (m)",
nonexclusiveStarEq.world_xyz[0],nonexclusiveStarEq.world_xyz[1],nonexclusiveStarEq.world_xyz[2]);
else s += " normal = null";
}
s+="\n\n";
return s;
}
public PlaneData getNonexclusiveStar()
{
return this.nonexclusiveStar;
}
public void setNonexclusiveStar( PlaneData pd)
{
this.nonexclusiveStar = pd;
}
public PlaneData getNonexclusiveStarEq()
{
return this.nonexclusiveStarEq;
}
public void setNonexclusiveStarEq( PlaneData pd)
{
this.nonexclusiveStarEq = pd;
}
public PlaneData getStarPlane()
{
return this.starPlane;
}
public void setStarPlane( PlaneData pd)
{
this.starPlane = pd;
}
public double getConnectionDensity(){
return conn_density;
}
// public void setStarValueWeight(double value, double weight){
// this.starValueWeight = new double[2];
// this.starValueWeight[0] = value;
// this.starValueWeight[1] = weight;
// System.out.println("setStarValueWeight(): conn_density is not set");
// }
public void setStarValueWeight(double[] val_weight){
this.starValueWeight = new double[2];
......@@ -175,6 +301,11 @@ public class TilePlanes {
{
return starValueWeight;
}
public double [] getStarValueWeightDensity()
{
double [] vwd = {starValueWeight[0], starValueWeight[1], conn_density};
return vwd;
}
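// Illustrative usage sketch (hypothetical caller code, not taken from this commit).
// The triple packs the star value, its weight and the connection density of this plane;
// it assumes setStarValueWeight() has already been called (starValueWeight != null):
//   double [] vwd      = pd.getStarValueWeightDensity();
//   double starValue   = vwd[0]; // connection-cost value of the "star" around this plane
//   double starWeight  = vwd[1]; // accumulated weight of that value
//   double connDensity = vwd[2]; // connection density, stored separately in conn_density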
public void setSelMask (boolean []sel_mask)
......@@ -263,6 +394,23 @@ public class TilePlanes {
}
}
if (src.merged_weig_val != null){
dst.merged_weig_val = src.merged_weig_val.clone();
for (int i = 0; i < src.merged_weig_val.length; i++){
if (src.merged_weig_val[i] != null){
dst.merged_weig_val[i] = src.merged_weig_val[i].clone();
}
}
}
if (src.merged_weig_eq != null){
dst.merged_weig_eq = src.merged_weig_eq.clone();
for (int i = 0; i < src.merged_weig_eq.length; i++){
if (src.merged_weig_eq[i] != null){
dst.merged_weig_eq[i] = src.merged_weig_eq[i].clone();
}
}
}
if (src.merged_valid != null){
dst.merged_valid = src.merged_valid.clone();
......@@ -883,11 +1031,12 @@ public class TilePlanes {
* @param smplRms maximal square root of variance (in disparity pixels) to accept the result
*
* @param debugLevel debug level
* @return per measurement layer disparity/strengths, or null if failed
* @return per measurement layer: x, y, z, weight, or null if failed. This
* value may be re-used in subsequent refinements (such as when removing outliers)
*/
public double [][][] getPlaneFromMeas(
boolean [][] tile_sel, // null - do not use, {} use all (will be modified)
double [][][] disp_str, // calculate just once when removing outlayers
double [][][] disp_str, // calculate just once when removing outliers
double disp_far, // minimal disparity to select (or NaN)
double disp_near, // maximal disparity to select (or NaN)
double dispNorm, // Normalize disparities to the average if above
......@@ -1113,17 +1262,343 @@ public class TilePlanes {
// find vector most orthogonal to view // (anyway it all works with that assumption), make it first
// TODO normalize to local linear scales
int oindx = 0;
if (preferDisparity) {
for (int i = 1; i <3; i++){
if (Math.abs(eig_vect[0][i]) > Math.abs(eig_vect[0][oindx])){
oindx = i;
}
}
} else {
for (int i = 1; i < 3 ; i++){
if (eig_val[i][i] < eig_val[oindx][oindx]){
oindx = i;
}
if (preferDisparity) {
for (int i = 1; i <3; i++){
if (Math.abs(eig_vect[0][i]) > Math.abs(eig_vect[0][oindx])){
oindx = i;
}
}
} else {
for (int i = 1; i < 3 ; i++){
if (eig_val[i][i] < eig_val[oindx][oindx]){
oindx = i;
}
}
}
if (eig_val[oindx][oindx] == 0.0){
System.out.println("getPlane(): zero eigenvalue!!");
}
// select 2 other axes for increasing eigenvalues (so v is short axis, h is the long one)
int vindx = (oindx == 0)? 1 : 0;
int hindx = (oindx == 0)? 2 : ((oindx == 1) ? 2 : 1);
if (eig_val[vindx][vindx] > eig_val[hindx][hindx]){
int tmp = vindx;
vindx = hindx;
hindx = tmp;
}
double [][] plane = {
{eig_vect[0][oindx],eig_vect[1][oindx],eig_vect[2][oindx]}, // plane normal to camera
{eig_vect[0][vindx],eig_vect[1][vindx],eig_vect[2][vindx]}, // "horizontal" axis // to detect skinny planes and poles
{eig_vect[0][hindx],eig_vect[1][hindx],eig_vect[2][hindx]}}; // "vertical" axis // to detect skinny planes and poles
// Make normal be towards camera (positive disparity), next vector - positive in X direction (right)
for (int v = 0; v < 2; v++) {
if (plane[v][v] < 0.0) for (int i = 0; i < 3; i ++) plane[v][i] = -plane[v][i];
}
// make direction last vector so px (x) py (.) disp < 0 (left-hand coordinate system)
if (new Matrix(plane).det() > 0){
for (int i = 0; i < 3; i ++) plane[2][i] = -plane[2][i];
}
setZxy(swz, swx, swy);
setWeight(sw);
setValues(eig_val[oindx][oindx],eig_val[vindx][vindx],eig_val[hindx][hindx]); // eigenvalues [0] - thickness, 2 other to detect skinny (poles)
setVectors (plane);
setNumPoints (num_tiles);
boolean [] plane_sel = null;
boolean need_clone = true;
for (int nl = 0; nl < tile_sel.length; nl++){
if (tile_sel[nl] != null) {
if (plane_sel == null) {
plane_sel = tile_sel[nl];
} else {
if (need_clone) {
plane_sel = plane_sel.clone();
need_clone = false;
}
for (int i = 0; i < plane_sel.length; i++){
plane_sel[i] |= tile_sel[nl][i];
}
}
}
}
setPlaneSelection(plane_sel);
return disp_str;
}
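// Illustration only (not part of this commit): a standalone sketch of the axis ordering
// used in getPlaneFromMeas() above when preferDisparity is false - the direction of the
// smallest eigenvalue becomes the plane normal, the other two are ordered by increasing value.
static int [] orderPlaneAxesSketch(double [] eig_vals) { // returns {normal, short, long}
	int oindx = 0;
	for (int i = 1; i < 3; i++) if (eig_vals[i] < eig_vals[oindx]) oindx = i;
	int vindx = (oindx == 0) ? 1 : 0;
	int hindx = (oindx == 0) ? 2 : ((oindx == 1) ? 2 : 1);
	if (eig_vals[vindx] > eig_vals[hindx]) {
		int tmp = vindx; vindx = hindx; hindx = tmp;
	}
	return new int [] {oindx, vindx, hindx};
}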
// similar to getPlaneFromMeas, but building ellipsoids in the real world space
public double [][][] getWorldPlaneFromMeas(
boolean [][] tile_sel, // null - do not use, {} use all (will be modified)
// TODO: make it accept tiles_xyzw (same as output)
double [][][] disp_str, // calculate just once when removing outliers
double disp_far, // minimal disparity to select (or NaN)
double disp_near, // maximal disparity to select (or NaN)
double dispNorm, // Normalize disparities to the average if above
double min_weight,
int min_tiles,
double strength_floor,
double strength_pow,
boolean smplMode, // = true; // Use sample mode (false - regular tile mode)
int smplSide, // = 2; // Sample size (side of a square)
int smplNum, // = 3; // Number after removing worst
double smplRms, // = 0.1; // Maximal RMS of the remaining tiles in a sample
int debugLevel)
{
double mindet = 1E-15;
int stSize2 = 2 * stSize;
int num_tiles = 0;
double sw = 0.0;
if (tile_sel != null) {
this.measuredSelection = tile_sel;
} else {
tile_sel = this.measuredSelection;
}
this.strength_floor = strength_floor;
this.measured_strength_pow = strength_pow;
this.min_weight = min_weight;
this.min_tiles = min_tiles;
this.dispNorm = dispNorm;
this.smplMode = smplMode; // = true; // Use sample mode (false - regular tile mode)
this.smplSide = smplSide; // = 2; // Sample size (side of a square)
this.smplNum = smplNum; // = 3; // Number after removing worst
this.smplRms = smplRms; // = 0.1; // Maximal RMS of the remaining tiles in a sample
if (debugLevel > 2){
System.out.println("getWorldPlaneFromMeas()");
}
boolean need_disp_str = false;
if (disp_str == null) {
disp_str = new double [tile_sel.length][][];
need_disp_str = true;
}
// TODO: Code duplication with getPlaneFromMeas() - extract the common part
for (int nl = 0; nl < tile_sel.length; nl++){
if (tile_sel[nl] != null){
if (smplMode) {
if (need_disp_str) {
disp_str[nl] = measuredLayers.getDisparityStrength( // expensive to calculate (improve when removing outliers)
nl, // int num_layer,
sTileXY[0], // int stX,
sTileXY[1], // int stY,
((tile_sel[nl].length == 0)? null:tile_sel[nl]), // boolean [] sel_in,
//tile_sel[nl], // boolean [] sel_in,
strength_floor,
strength_pow, //
smplSide, // = 2; // Sample size (side of a square)
smplNum, // = 3; // Number after removing worst
smplRms, // = 0.1; // Maximal RMS of the remaining tiles in a sample
true); // boolean null_if_none)
}
if (disp_str[nl] == null) continue;
if (Double.isNaN(disp_far) && Double.isNaN(disp_near)){
tile_sel[nl] = measuredLayers.getSupertileSelection(
disp_str[nl],
((tile_sel[nl].length == 0)? null:tile_sel[nl]), // boolean [] sel_in,
true); // boolean null_if_none)
} else {
tile_sel[nl] = measuredLayers.getSupertileSelection(
disp_str[nl],
((tile_sel[nl].length == 0)? null:tile_sel[nl]), // boolean [] sel_in,
disp_far, // double disp_far,
disp_near, // double disp_near,
true); // boolean null_if_none)
}
sw += MeasuredLayers.getSumStrength(disp_str[nl],tile_sel[nl]);
num_tiles += MeasuredLayers.getNumSelected(tile_sel[nl]);
} else {
if (Double.isNaN(disp_far) && Double.isNaN(disp_near)){
tile_sel[nl] = measuredLayers.getSupertileSelection(
nl, // int num_layer,
sTileXY[0], // int stX,
sTileXY[1], // int stY,
((tile_sel[nl].length == 0)? null:tile_sel[nl]), // boolean [] sel_in,
strength_floor,
true); // boolean null_if_none)
} else {
tile_sel[nl] = measuredLayers.getSupertileSelection(
nl, // int num_layer,
sTileXY[0], // int stX,
sTileXY[1], // int stY,
((tile_sel[nl].length == 0)? null:tile_sel[nl]), // boolean [] sel_in,
disp_far, // double disp_far,
disp_near, // double disp_near,
strength_floor,
true); // boolean null_if_none)
}
num_tiles += MeasuredLayers.getNumSelected(tile_sel[nl]);
if (tile_sel[nl] != null){
disp_str[nl] = measuredLayers.getDisparityStrength(
nl, // int num_layer,
sTileXY[0], // int stX,
sTileXY[1], // int stY,
tile_sel[nl], // boolean [] sel_in,
strength_floor,
strength_pow, //
true); // boolean null_if_none)
sw += MeasuredLayers.getSumStrength(disp_str[nl]);
}
}
if ((debugLevel > 3) && (disp_str[nl] != null)){
// if ((debugLevel > 1) && (disp_str[nl] != null)){
showDoubleFloatArrays sdfa_instance = new showDoubleFloatArrays();
double [][] dbg_img = new double [3][];
dbg_img[0] = disp_str[nl][0];
dbg_img[1] = disp_str[nl][1];
dbg_img[2] = new double [stSize2*stSize2];
for (int i = 0; i < dbg_img[2].length; i++){
dbg_img[2][i] = tile_sel[nl][i]?1.0:0.0;
}
sdfa_instance.showArrays(dbg_img, stSize2, stSize2, true, "disp_str_x"+sTileXY[0]+"_y"+sTileXY[1]+"_"+nl);
}
}
}
this.measuredSelection = tile_sel; // it may be modified
if ((sw < min_weight) || (num_tiles < min_tiles)) {
if (debugLevel > 1){
System.out.println("getWorldPlaneFromMeas():return false");
}
return null; // too weak plane or too few tiles selected
}
double [][][] tiles_xyzw = new double [disp_str.length][][];
for (int nl = 0; nl < tile_sel.length; nl++) if (disp_str[nl] != null) {
tiles_xyzw[nl] = new double [disp_str[nl][0].length][];
}
double [][] acovar = new double [3][3];
double swz = 0.0, swx = 0.0, swy = 0.0;
sw =0.0;
double [] px_py = getCenterPxPy();
for (int nl = 0; nl < tile_sel.length; nl++){
if (disp_str[nl] != null) {
for (int indx = 0; indx < disp_str[nl][0].length; indx++){
if (tile_sel[nl][indx]) {
double w = disp_str[nl][1][indx];
if (w > 0.0){
tiles_xyzw[nl][indx] = new double [4];
double d = disp_str[nl][0][indx];
// referencing samples to centers of pixels
double x = ((indx % stSize2) - stSize + 0.5) * tileSize + 0.5 + px_py[0]; // in pixels, not in tiles
double y = ((indx / stSize2) - stSize + 0.5) * tileSize + 0.5 + px_py[1];
// difference from getPlaneFromMeas
double [] wxyz = geometryCorrection.getWorldCoordinates(
x,
y,
d,
this.correctDistortions);
tiles_xyzw[nl][indx][0] = wxyz[0];
tiles_xyzw[nl][indx][1] = wxyz[1];
tiles_xyzw[nl][indx][2] = wxyz[2];
tiles_xyzw[nl][indx][3] = w;
sw += w;
swz += w * wxyz[2];
swx += w * wxyz[0];
swy += w * wxyz[1];
// end of difference from getPlaneFromMeas
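// Worked example of the pixel referencing above (illustrative numbers, assuming
// stSize = 8 and tileSize = 8, so stSize2 = 16): for indx = 0
//   x = ((0 % 16) - 8 + 0.5) * 8 + 0.5 + px_py[0] = -59.5 + px_py[0]
//   y = ((0 / 16) - 8 + 0.5) * 8 + 0.5 + px_py[1] = -59.5 + px_py[1]
// i.e. each sample is referenced to the center of its pixel inside the double-size
// supertile window around the supertile center (px_py), then converted to world XYZ.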
}
}
}
}
}
if (sw == 0.0) {
return null; //
}
swz /= sw;
swx /= sw;
swy /= sw;
setWxyz(swx, swy, swz);
// double kz = ((dispNorm > 0.0) && (swz > dispNorm)) ? (dispNorm / swz) : 1.0;
if (debugLevel > 0){
System.out.println("getWorldPlaneFromMeas(): num_tiles="+num_tiles+", sw = "+sw +", swz = "+swz +", swx = "+swx +", swy = "+swy);
}
// TODO: scale disparity to make same scale for 3 axes?
for (int nl = 0; nl < tile_sel.length; nl++){
if (disp_str[nl] != null) {
for (int indx = 0; indx < disp_str[nl][0].length; indx++){
if (tiles_xyzw[nl][indx] != null) {
double w = tiles_xyzw[nl][indx][3] / sw;
if (w > 0.0){
/*
double d = kz * (disp_str[nl][0][indx] - swz);
double wd = w*d;
double x = ((indx % stSize2) - stSize + 0.5) * tileSize + 0.5 - swx;
double y = ((indx / stSize2) - stSize + 0.5) * tileSize + 0.5 - swy;
acovar [0][0] += wd * d;
acovar [0][1] += wd * x;
acovar [0][2] += wd * y;
acovar [1][1] += w * x * x;
acovar [1][2] += w * x * y;
acovar [2][2] += w * y * y;
*/
double x = tiles_xyzw[nl][indx][0] - swx;
double y = tiles_xyzw[nl][indx][1] - swy;
double z = tiles_xyzw[nl][indx][2] - swz;
acovar [0][0] += w * x * x;
acovar [0][1] += w * x * y;
acovar [0][2] += w * x * z;
acovar [1][1] += w * y * y;
acovar [1][2] += w * y * z;
acovar [2][2] += w * z * z;
}
}
}
}
}
acovar [1][0] = acovar [0][1];
acovar [2][0] = acovar [0][2];
acovar [2][1] = acovar [1][2];
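// The loop above accumulates (with weights w already normalized by sw) the covariance
//   C[j][k] = sum_i w_i * (p_i[j] - mean[j]) * (p_i[k] - mean[k]),  p_i = (x, y, z) in meters,
// and the three assignments just above mirror the upper triangle to make C symmetric.
// Below, its eigenvector with the smallest eigenvalue is taken as the world-space plane
// normal, exactly as the disparity-space version does for (d, px, py).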
Matrix covar = new Matrix(acovar);
EigenvalueDecomposition eig = covar.eig();
if (Double.isNaN(eig.getV().get(0, 0))){
System.out.println("getCovar(): Double.isNaN(eig.getV().get(0, 0))");
debugLevel = 20;
}
if (debugLevel > 3){
// if (debugLevel > 0){
System.out.println("getCovar(): sw = "+sw +", swz = "+swz +", swx = "+swx +", swy = "+swy +", covar.det() = "+covar.det());
System.out.println("getCovar(): covariance matrix, number of used points: "+num_tiles);
covar.print(10, 6); // w,d
System.out.println("getCovar(): eigenvalues");
eig.getD().print(10, 6); // w,d
System.out.println("getCovar(): eigenvectors");
eig.getV().print(10, 6); // w,d
}
if ((eig.getD().get(0, 0) == 0.0) || (Math.abs(covar.det()) < mindet)) {
return null; // testing with zero eigenvalue
// Problem with zero eigenvalue is with derivatives and coordinate conversion
}
double [][] eig_val = eig.getD().getArray(); // rslt[0];
double [][] eig_vect = eig.getV().getArray(); // rslt[1];
// find vector most orthogonal to view // (anyway it all works with that assumption), make it first
// TODO normalize to local linear scales
// probably this reordering is not needed as they are already ordered
// for world coordinates the sequence is normal x,y,z (not d,x,y)
int oindx = 0;
for (int i = 1; i < 3 ; i++){
if (eig_val[i][i] < eig_val[oindx][oindx]){
oindx = i;
}
}
if (eig_val[oindx][oindx] == 0.0){
......@@ -1145,8 +1620,14 @@ public class TilePlanes {
{eig_vect[0][hindx],eig_vect[1][hindx],eig_vect[2][hindx]}}; // "vertical" axis // to detect skinny planes and poles
// Make normal be towards camera (positive disparity), next vector - positive in X direction (right)
for (int v = 0; v < 2; v++) {
if (plane[v][v] < 0.0) for (int i = 0; i < 3; i ++) plane[v][i] = -plane[v][i];
double from = 0.0; // see if the first vector ("plane" normal) is away from the camera
for (int i = 0; i < 3; i++) {
from += this.wxyz[i]*plane[0][i];
}
if (from > 0.0){ // reverse the first vector to be towards the camera
for (int i = 0; i < 3; i++) {
plane[0][i] = -plane[0][i];
}
}
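// Note (illustrative, assuming getWorldCoordinates() returns camera-centered coordinates):
// wxyz points from the camera to the plane center, so a positive projection of the candidate
// normal onto wxyz means the normal faces away from the camera, hence the sign flip above.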
// make direction last vector so px (x) py (.) disp < 0 (left-hand coordinate system)
......@@ -1154,11 +1635,12 @@ public class TilePlanes {
for (int i = 0; i < 3; i ++) plane[2][i] = -plane[2][i];
}
setZxy(swz, swx, swy);
setWeight(sw);
setValues(eig_val[oindx][oindx],eig_val[vindx][vindx],eig_val[hindx][hindx]); // eigenvalues [0] - thickness, 2 other to detect skinny (poles)
setVectors (plane);
setNumPoints (num_tiles);
// setZxy(swz, swx, swy);
setWeight(sw); // should be the same
setWValues(eig_val[oindx][oindx],eig_val[vindx][vindx],eig_val[hindx][hindx]); // eigenvalues [0] - thickness, 2 other to detect skinny (poles)
setWVectors (plane);
setNumPoints (num_tiles); // should be the same
boolean [] plane_sel = null;
boolean need_clone = true;
for (int nl = 0; nl < tile_sel.length; nl++){
......@@ -1178,13 +1660,19 @@ public class TilePlanes {
}
setPlaneSelection(plane_sel);
// return disp_str;
return disp_str;
}
public double [][] initMergedValue()
{
this.merged_eig_val = new double[8][];
this.merged_eig_eq = new double[8][];
this.merged_weig_val = new double[8][];
this.merged_weig_eq = new double[8][];
this.merged_valid = new boolean[8][];
return this.merged_eig_val;
}
......@@ -1198,14 +1686,29 @@ public class TilePlanes {
return this.merged_eig_eq;
}
public double [][] getMergedWValue()
{
return this.merged_weig_val;
}
public double [][] getMergedWValueEq()
{
return this.merged_weig_eq;
}
public double [] initMergedValue(int dir, int leng)
{
this.merged_eig_val[dir] = new double[leng];
this.merged_eig_eq[dir] = new double[leng];
this.merged_weig_val[dir] = new double[leng];
this.merged_weig_eq[dir] = new double[leng];
this.merged_valid[dir] = new boolean[leng];
for (int i = 0; i < leng; i++) {
this.merged_eig_val[dir][i] = Double.NaN;
this.merged_eig_eq[dir][i] = Double.NaN;
this.merged_eig_val[dir][i] = Double.NaN;
this.merged_eig_eq[dir][i] = Double.NaN;
this.merged_weig_val[dir][i] = Double.NaN;
this.merged_weig_eq[dir][i] = Double.NaN;
}
return getMergedValue(dir);
}
......@@ -1226,6 +1729,24 @@ public class TilePlanes {
return this.merged_eig_eq[dir];
}
public double [] getMergedWValue(int dir)
{
if (this.merged_weig_val == null) {
return null;
}
return this.merged_weig_val[dir];
}
public double [] getMergedWValueEq(int dir)
{
if (this.merged_weig_eq == null) {
return null;
}
return this.merged_weig_eq[dir];
}
public double getMergedValue(int dir, int plane)
{
if ((this.merged_eig_val == null) ||(this.merged_eig_val[dir] == null)){
......@@ -1242,6 +1763,22 @@ public class TilePlanes {
return this.merged_eig_eq[dir][plane];
}
public double getMergedWValue(int dir, int plane)
{
if ((this.merged_weig_val == null) || (this.merged_weig_val[dir] == null)){
return Double.NaN;
}
return this.merged_weig_val[dir][plane];
}
public double getMergedWValueEq(int dir, int plane)
{
if ((this.merged_weig_eq == null) ||(this.merged_weig_eq[dir] == null)){
return Double.NaN;
}
return this.merged_weig_eq[dir][plane];
}
public void setNeibMatch(int dir, int plane, double value)
{
this.merged_eig_val[dir][plane] = value;
......@@ -1251,6 +1788,14 @@ public class TilePlanes {
this.merged_eig_eq[dir][plane] = value;
}
public void setNeibWMatch(int dir, int plane, double value)
{
this.merged_weig_val[dir][plane] = value;
}
public void setNeibWMatchEq(int dir, int plane, double value)
{
this.merged_weig_eq[dir][plane] = value;
}
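// Note (descriptive assumption): the *W* getters/setters above parallel the existing
// merged_eig_val / merged_eig_eq pair, but presumably hold the match quality of the
// world-space (wxyz / wvalues / wvectors) plane fits, so disparity-space and world-space
// link qualities can be stored and filtered independently per direction and layer.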
public boolean [][] getMergedValid()
{
......@@ -1265,6 +1810,16 @@ public class TilePlanes {
return this.merged_valid[dir];
}
public boolean hasMergedValid(int dir){
if ((this.merged_valid == null) || (this.merged_valid[dir] == null)){
return false;
}
for (int np = 0; np < this.merged_valid[dir].length; np++){
if (this.merged_valid[dir][np]) return true;
}
return false;
}
public boolean isMergedValid(int dir, int plane)
{
if ((this.merged_valid == null) || (this.merged_valid[dir] == null)){
......@@ -1396,6 +1951,54 @@ public class TilePlanes {
this.values[1] = v2;
this.values[2] = v3;
}
public double[] getWxyz() {
return wxyz;
}
public void setWxyz(double[] wxyz) {
this.wxyz = wxyz;
}
public void setWxyz(
double x,
double y,
double z) {
this.wxyz = new double [3];
this.wxyz[0] = x;
this.wxyz[1] = y;
this.wxyz[2] = z;
}
public double[][] getWVectors() {
return wvectors;
}
public double[] getWVector() {
return wvectors[0];
}
public void setWVectors(double[][] wvectors) {
this.wvectors = wvectors;
}
public double[] getWValues() {
return wvalues;
}
public double getWValue() {
return wvalues[0];
}
public void setWValues(double[] wvalues) {
this.wvalues = wvalues;
}
public void setWValues(double wv1, double wv2, double wv3) {
this.wvalues = new double[3];
this.wvalues[0] = wv1;
this.wvalues[1] = wv2;
this.wvalues[2] = wv3;
}
public int getNumPoints() {
return num_points;
}
......@@ -2053,23 +2656,6 @@ public class TilePlanes {
* @return PlaneData object representing merged planes with combined weight (scale_other*otherPd.weight + this.weight),
* recalculated center, eigenvalues and eigenvectors
*/
public PlaneData mergePlaneToThis1(
PlaneData otherPd,
double scale_other,
boolean ignore_weights,
boolean sum_weights,
boolean preferDisparity, // Always start with disparity-most axis (false - lowest eigenvalue)
int debugLevel)
{
return mergePlaneToThis(
otherPd,
scale_other,
1.0, // double starWeightPwr, // Use this power of tile weight when calculating connection cost
ignore_weights,
sum_weights,
preferDisparity, // Always start with disparity-most axis (false - lowest eigenvalue)
debugLevel);
}
public PlaneData mergePlaneToThis(
PlaneData otherPd,
double scale_other,
......@@ -2246,16 +2832,200 @@ public class TilePlanes {
new_weight = Math.pow(new_weight,1.0/starWeightPwr);
}
pd.setWeight(new_weight);
/*
if (sum_weights) {
pd.setWeight(sum_weight); // normalize while averaging by the caller
} else { // how it was before
pd.setWeight(other_fraction * other_weight + (1.0 - other_fraction) * this_weight);
}
*/
pd.setNumPoints(otherPd.getNumPoints()+this.getNumPoints());
// Repeat merging for world-based planes
mergePlaneToThisWorld(
otherPd, // PlaneData otherPd,
pd, // PlaneData pd_partial, // disparity-based data is already merged
scale_other,
starWeightPwr, // Use this power of tile weight when calculating connection cost
ignore_weights,
sum_weights,
preferDisparity, // Always start with disparity-most axis (false - lowest eigenvalue)
debugLevel);
return pd;
}
// only handles wxyz, wvalues,wvectors - the rest should be done with mergePlaneToThis()
private PlaneData mergePlaneToThisWorld(
PlaneData otherPd,
PlaneData pd_partial, // disparity-based data is already merged
double scale_other,
double starWeightPwr, // Use this power of tile weight when calculating connection cost
boolean ignore_weights,
boolean sum_weights,
boolean preferDisparity, // Always start with disparity-most axis (false - lowest eigenvalue)
int debugLevel)
{
if (debugLevel > 0) {
System.out.println("mergePlaneToThisWorld()");
}
double [][] this_eig_avals = {
{wvalues[0], 0.0, 0.0},
{0.0, wvalues[1], 0.0},
{0.0, 0.0, wvalues[2]}};
double [][] other_eig_avals = {
{otherPd.wvalues[0], 0.0, 0.0},
{0.0, otherPd.wvalues[1], 0.0},
{0.0, 0.0, otherPd.wvalues[2]}};
Matrix this_eig_vals = new Matrix(this_eig_avals);
Matrix other_eig_vals = new Matrix(other_eig_avals);
Matrix other_eig_vectors = new Matrix(otherPd.wvectors).transpose(); // vectors are saved as rows
Matrix this_eig_vectors = new Matrix(this.wvectors).transpose(); // vectors are saved as rows
Matrix this_center = new Matrix(this.getWxyz(),3);
Matrix other_center = new Matrix(otherPd.getWxyz(),3); // should already be relative to this supertile center
double this_weight = this.weight;
double other_weight = otherPd.weight;
if (starWeightPwr == 0){
ignore_weights = true;
} else if (starWeightPwr != 1.0){
this_weight = Math.pow(this_weight, starWeightPwr);
other_weight = Math.pow(other_weight,starWeightPwr);
}
double sum_weight = scale_other * other_weight + this_weight;// should be the same for
double other_fraction = ignore_weights? (scale_other/(scale_other + 1.0)): ((scale_other * other_weight) / sum_weight);
Matrix common_center = this_center.times(1.0 - other_fraction).plus(other_center.times(other_fraction));
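// Illustrative numbers (assumed, not from the commit): with scale_other = 1.0,
// ignore_weights = false, this_weight = 2.0 and other_weight = 1.0:
//   sum_weight     = 1.0 * 1.0 + 2.0 = 3.0
//   other_fraction = 1.0 / 3.0
//   common_center  = (2/3) * this_center + (1/3) * other_center
// i.e. the merged center is the strength-weighted mean of the two world-space centers.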
Matrix other_offset = other_center.minus(this_center); // other center from this center
if ((this.values[0] == 0.0) || (otherPd.values[0] == 0.0)) {
System.out.println("Zero eigenvalue");
debugLevel = 10;
}
if (debugLevel > 0) {
System.out.println("other_eig_vals");
other_eig_vals.print(8, 6);
System.out.println("this_eig_vals");
this_eig_vals.print(8, 6);
System.out.println("other_eig_vectors");
other_eig_vectors.print(8, 6);
System.out.println("this_eig_vectors");
this_eig_vectors.print(8, 6);
System.out.println("other_center");
other_center.print(8, 6);
System.out.println("this_center");
this_center.print(8, 6);
System.out.println("common_center");
common_center.print(8, 6);
System.out.println("other_offset");
other_offset.print(8, 6);
System.out.println("other_fraction="+other_fraction);
}
double [][] acovar = { // covariance matrix of center masses (not yet scaled by weight)
{other_offset.get(0,0)*other_offset.get(0,0), other_offset.get(0,0)*other_offset.get(1,0), other_offset.get(0,0)*other_offset.get(2,0)},
{other_offset.get(1,0)*other_offset.get(0,0), other_offset.get(1,0)*other_offset.get(1,0), other_offset.get(1,0)*other_offset.get(2,0)},
{other_offset.get(2,0)*other_offset.get(0,0), other_offset.get(2,0)*other_offset.get(1,0), other_offset.get(2,0)*other_offset.get(2,0)}};
Matrix other_covar = other_eig_vectors.times(other_eig_vals.times(other_eig_vectors.transpose()));
Matrix this_covar = this_eig_vectors.times(this_eig_vals.times(this_eig_vectors.transpose()));
Matrix covar = (new Matrix(acovar)).times(other_fraction*(1.0-other_fraction)); // only centers with all masses
if (debugLevel > 0) {
System.out.println("other_covar");
other_covar.print(8, 6);
System.out.println("this_covar");
this_covar.print(8, 6);
System.out.println("covar");
covar.print(8, 6);
}
covar.plusEquals(other_covar.times(other_fraction));
if (debugLevel > 0) {
System.out.println("covar with other_covar");
covar.print(8, 6);
}
covar.plusEquals(this_covar.times(1.0 - other_fraction));
if (debugLevel > 0) {
System.out.println("covar with other_covar and this_covar");
covar.print(8, 6);
}
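// What the three covariance terms above implement (a weighted mixture of the two ellipsoids):
//   C_merged = f * (1 - f) * offset * offset^T  +  f * C_other  +  (1 - f) * C_this,   f = other_fraction,
// where offset = other_center - this_center. The first term captures the spread between the
// two centers, the other two are the within-ellipsoid covariances reconstructed from the
// stored eigenvalues and eigenvectors.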
if (Double.isNaN(covar.get(0, 0))){
System.out.println("covar is NaN !");
covar.print(8, 6);
}
// extract new eigenvalues, eigenvectors
EigenvalueDecomposition eig = covar.eig(); // verify NaN - it gets stuck
// eig.getD().getArray(),
// eig.getV().getArray(),
if (debugLevel > 0) {
System.out.println("eig.getV()");
eig.getV().print(8, 6);
System.out.println("eig.getD()");
eig.getD().print(8, 6);
}
double [][] eig_vect = eig.getV().getArray();
double [][] eig_val = eig.getD().getArray();
// probably this reordering is not needed as they are already ordered
// for world coordinates the sequence is normal x,y,z (not d,x,y)
int oindx = 0;
for (int i = 1; i <3; i++){
if (eig_val[i][i] < eig_val[oindx][oindx]){
oindx = i;
}
}
// select 2 other axes for increasing eigenvalues (so v is short axis, h is the long one)
int vindx = (oindx == 0)? 1 : 0;
int hindx = (oindx == 0)? 2 : ((oindx == 1) ? 2 : 1);
if (eig_val[vindx][vindx] > eig_val[hindx][hindx]){
int tmp = vindx;
vindx = hindx;
hindx = tmp;
}
double [][] plane = {
{eig_vect[0][oindx],eig_vect[1][oindx],eig_vect[2][oindx]}, // plane normal to camera
{eig_vect[0][vindx],eig_vect[1][vindx],eig_vect[2][vindx]}, // "horizontal" axis // to detect skinny planes and poles
{eig_vect[0][hindx],eig_vect[1][hindx],eig_vect[2][hindx]}}; // "vertical" axis // to detect skinny planes and poles
// make towards camera, left coordinate system
// Make normal be towards camera (positive disparity), next vector - positive in X direction (right)
double from = 0.0; // see if the first vector ("plane" normal) is away from the camera
for (int i = 0; i < 3; i++) {
from += this.wxyz[i]*plane[0][i];
}
if (from > 0.0){ // reverse the first vector to be towards the camera
for (int i = 0; i < 3; i++) {
plane[0][i] = -plane[0][i];
}
}
// make direction last vector so px (x) py (.) disp < 0 (left-hand coordinate system)
if (new Matrix(plane).det() > 0){
for (int i = 0; i < 3; i ++) plane[2][i] = -plane[2][i];
}
// PlaneData pd = this.clone(); // will copy selections too
// pd.invalidateCalculated(); // real world vectors
pd_partial.setWValues(eig_val[oindx][oindx],eig_val[vindx][vindx],eig_val[hindx][hindx]); // eigenvalues [0] - thickness, 2 other to detect skinny (poles)
pd_partial.setWVectors(plane);
pd_partial.setWxyz(common_center.getColumnPackedCopy()); // set new center
// what weight to use? cloned is original weight for this supertile
// or use weighted average like below?
if (debugLevel < -1000) { // already done in mergePlaneToThis(), which calls this method
double new_weight;
if (sum_weights) {
new_weight = sum_weight; // normalize while averaging by the caller
} else { // how it was before
new_weight = other_fraction * other_weight + (1.0 - other_fraction) * this_weight;
}
if (!ignore_weights && ((starWeightPwr != 1.0))){
new_weight = Math.pow(new_weight,1.0/starWeightPwr);
}
pd_partial.setWeight(new_weight);
}
return pd_partial;
}
/**
* Convert plane data from other supertile to this one (disparity, px, py) for the center of this supertile
......@@ -2424,6 +3194,13 @@ public class TilePlanes {
return world_v1;
}
}
//this.correctDistortions
public double [] getCenterXYZ(
int debugLevel){
return getCenterXYZ(this.correctDistortions, debugLevel);
}
public double [] getCenterXYZ(
boolean correct_distortions,
int debugLevel)
......@@ -2445,6 +3222,10 @@ public class TilePlanes {
return center_xyz;
}
public double [] getWorldXYZ(
int debugLevel){
return getWorldXYZ(this.correctDistortions, debugLevel);
}
public double [] getWorldXYZ(
boolean correct_distortions,
int debugLevel)
......
......@@ -3380,105 +3380,147 @@ public class TileProcessor {
0, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
/* } else {
st.processPlanes4(
clt_parameters.stMeasSel, // = 1 //Select measurements for supertiles : +1 - combo, +2 - quad +4 - hor +8 - vert
clt_parameters.plDispNorm, // = 2.0; // Normalize disparities to the average if above
clt_parameters.plMinPoints, // = 5; // Minimal number of points for plane detection
clt_parameters.plTargetEigen, // = 0.1; // Remove outliers until main axis eigenvalue (possibly scaled by plDispNorm) gets below
clt_parameters.plFractOutliers, // = 0.3; // Maximal fraction of outliers to remove
clt_parameters.plMaxOutliers, // = 20; // Maximal number of outliers to remove\
clt_parameters.plPreferDisparity,
geometryCorrection,
clt_parameters.correct_distortions,
clt_parameters.stSmplMode , // final boolean smplMode, // = true; // Use sample mode (false - regular tile mode)
clt_parameters.stSmplSide , // final int smplSide, // = 2; // Sample size (side of a square)
clt_parameters.stSmplNum , // final int smplNum, // = 3; // Number after removing worst
clt_parameters.stSmplRms , // final double smplRms, // = 0.1; // Maximal RMS of the remaining tiles in a sample
showDoubleFloatArrays sdfa_instance = null;
if (debugLevel > -1) sdfa_instance = new showDoubleFloatArrays(); // just for debugging?
// Trying new class
LinkPlanes lp = new LinkPlanes (clt_parameters, st);
// try to merge multiple times
int max_num_merge_try = 5;
TilePlanes.PlaneData [][][] dbg_orig_planes = new TilePlanes.PlaneData [max_num_merge_try][][];
for (int num_merge_try = 0; num_merge_try < max_num_merge_try; num_merge_try++){
lp.matchPlanes(
st.planes, // final TilePlanes.PlaneData [][] planes,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
clt_parameters.plBlurBinHor, // final double bin_blur_hor, // Blur disparity histograms for horizontal clusters by this sigma (in bins)
clt_parameters.plBlurBinVert, // final double bin_blur_vert, // Blur disparity histograms for constant disparity clusters by this sigma (in bins)
lp.filterNeighborPlanes(
st.planes, // final TilePlanes.PlaneData [][] planes,
true, // final boolean merge_low_eigen,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
clt_parameters.stSmallDiff, // = 0.4; // Consider merging initial planes if disparity difference below
clt_parameters.stHighMix, //stHighMix = 0.4; // Consider merging initial planes if jumps between ratio above
// clt_parameters.vertical_xyz,
0, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
}
*/
showDoubleFloatArrays sdfa_instance = null;
if (debugLevel > -1) sdfa_instance = new showDoubleFloatArrays(); // just for debugging?
// Trying new class
LinkPlanes lp = new LinkPlanes (clt_parameters, st);
lp.matchPlanes(
st.planes, // final TilePlanes.PlaneData [][] planes,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
// calculate it here - use results to keep some planes from merging
double [][] quality_stats1 = lp.selectNeighborPlanesMutual(
st.planes, // final TilePlanes.PlaneData [][] planes,
2, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
lp.setNonExclusive(
st.planes, // final TilePlanes.PlaneData [][] planes,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
lp.calcStarValueStrength(
true, // boolean set_start_planes,
clt_parameters.plStarOrtho, // orthoWeight, // final double orthoWeight,
clt_parameters.plStarDiag, // diagonalWeight, // final double diagonalWeight,
clt_parameters.plStarPwr, // starPwr, // final double starPwr, // Divide cost by number of connections to this power
clt_parameters.plStarWeightPwr,// starWeightPwr, // final double starWeightPwr, // Use this power of tile weight when calculating connection cost
clt_parameters.plWeightToDens, // weightToDens, // Balance weighted density against density. 0.0 - density, 1.0 - weighted density
clt_parameters.plStarValPwr, // starValPwr, //double starValPwr, // Raise value of each tile before averaging
2, // starSteps, // final int steps,
st.planes, // final TilePlanes.PlaneData [][] planes,
clt_parameters.plPreferDisparity, // preferDisparity, // final boolean preferDisparity)
0); // debugLevel);
int [][][] merge_candidates = lp.getMergeSameTileCandidates(
st.planes, // final TilePlanes.PlaneData [][] planes,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
lp.filterNeighborPlanes(
st.planes, // final TilePlanes.PlaneData [][] planes,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
boolean [][][] plane_nooverlaps = lp.overlapSameTileCandidates (
st.planes, // final TilePlanes.PlaneData [][] planes,
merge_candidates, // final int [][][] merge_candidates,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
int [][][] merge_candidates = lp.getMergeSameTileCandidates(
st.planes, // final TilePlanes.PlaneData [][] planes,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
// remove merge candidates that break connections to neighbors
lp.keepSameTileConnections(
st.planes, // final TilePlanes.PlaneData [][] planes,
merge_candidates, // final int [][][] merge_candidates,
plane_nooverlaps, // final boolean [][][] valid_candidates, // will be updated
true, // final boolean merge_low_eigen,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
boolean [][][] plane_nooverlaps = lp.overlapSameTileCandidates (
st.planes, // final TilePlanes.PlaneData [][] planes,
merge_candidates, // final int [][][] merge_candidates,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
/*
merge_candidates = lp.filterMergeSameTileCandidates(
st.planes, // final TilePlanes.PlaneData [][] planes,
merge_candidates, // final int [][][] merge_candidates,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
boolean [][] pairs_to_merge = lp.mergeSameTileEvaluate(
st.planes, // final TilePlanes.PlaneData [][] planes,
merge_candidates, // final int [][][] merge_candidates,
plane_nooverlaps, // boolean [][][] plane_overlaps,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
System.out.println(pairs_to_merge.length);
*/
int [][][] merge_groups = lp.extractMergeSameTileGroups(
st.planes, // final TilePlanes.PlaneData [][] planes,
merge_candidates, // final int [][][] merge_candidates,
plane_nooverlaps, // boolean [][][] plane_overlaps,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
int num_removed_by_merging = st.applyMergePlanes(
st.planes, // final TilePlanes.PlaneData[][] planes,
merge_groups, // final int [][][] merge_groups,
// parameters to generate ellipsoids
0.0, // 3, // final double disp_far, // minimal disparity to select (or NaN)
Double.NaN, // final double disp_near, // maximal disparity to select (or NaN)
clt_parameters.plDispNorm, // final double dispNorm, // Normalize disparities to the average if above
0.0, // final double min_weight,
clt_parameters.plMinPoints, // final int min_tiles,
// parameters to reduce outliers
clt_parameters.plTargetEigen, // final double targetEigen, // = 0.1; // Remove outliers until main axis eigenvalue (possibly scaled by plDispNorm) gets below
clt_parameters.plFractOutliers, // final double fractOutliers, // = 0.3; // Maximal fraction of outliers to remove
clt_parameters.plMaxOutliers, // final int maxOutliers, // = 20; // Maximal number of outliers to remove
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
// double [][][][][][] merge_cost_data =
lp.costSameTileConnections(
false, // final boolean ignore_weights,
1000.0, // final double threshold_worst,
1000.0, //final double threshold_world_worst,
st.planes, // ffinal TilePlanes.PlaneData [][] planes,
merge_candidates, // final int [][][] merge_candidates,
plane_nooverlaps, // final boolean [][][] valid_candidates, // will be updated
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
// System.out.println("merge_cost_data.length = " + merge_cost_data.length);
/*
// double [][][][][][] merge_cost_data_eq =
lp.costSameTileConnections(
true, // final boolean ignore_weights,
1.8, // final double threshold_worst,
1000.0, //final double threshold_world_worst,
st.planes, // ffinal TilePlanes.PlaneData [][] planes,
merge_candidates, // final int [][][] merge_candidates,
plane_nooverlaps, // final boolean [][][] valid_candidates, // will be updated
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
// System.out.println("merge_cost_data_eq.length = " + merge_cost_data_eq.length);
*/
int [][][] merge_groups = lp.extractMergeSameTileGroups(
st.planes, // final TilePlanes.PlaneData [][] planes,
merge_candidates, // final int [][][] merge_candidates,
plane_nooverlaps, // boolean [][][] plane_overlaps,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
dbg_orig_planes[num_merge_try] = st.planes.clone();
for (int nsTile=0; nsTile < st.planes.length; nsTile++) if (st.planes[nsTile] != null){
dbg_orig_planes[num_merge_try][nsTile] = st.planes[nsTile].clone();
for (int np = 0; np < st.planes[nsTile].length; np++ ) if (st.planes[nsTile][np] != null){
dbg_orig_planes[num_merge_try][nsTile][np] = st.planes[nsTile][np].clone();
}
}
int num_removed_by_merging = st.applyMergePlanes(
st.planes, // final TilePlanes.PlaneData[][] planes,
merge_groups, // final int [][][] merge_groups,
// parameters to generate ellipsoids
0.0, // 3, // final double disp_far, // minimal disparity to select (or NaN)
Double.NaN, // final double disp_near, // maximal disparity to select (or NaN)
clt_parameters.plDispNorm, // final double dispNorm, // Normalize disparities to the average if above
0.0, // final double min_weight,
clt_parameters.plMinPoints, // final int min_tiles,
// parameters to reduce outliers
clt_parameters.plTargetEigen, // final double targetEigen, // = 0.1; // Remove outliers until main axis eigenvalue (possibly scaled by plDispNorm) gets below
clt_parameters.plFractOutliers, // final double fractOutliers, // = 0.3; // Maximal fraction of outliers to remove
clt_parameters.plMaxOutliers, // final int maxOutliers, // = 20; // Maximal number of outliers to remove
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
System.out.println("Try "+num_merge_try+ ": removed "+num_removed_by_merging+" planes by merging, recalculating connections");
if (num_removed_by_merging == 0){ // re-calculate all links
break;
System.out.println("Removed "+num_removed_by_merging+" planes by merging, recalculating connections");
}
}
/*
if (num_removed_by_merging > 0){ // re-calculate all links
lp.matchPlanes(
st.planes, // final TilePlanes.PlaneData [][] planes,
......@@ -3487,13 +3529,13 @@ public class TileProcessor {
clt_parameters.tileY);
lp.filterNeighborPlanes(
st.planes, // final TilePlanes.PlaneData [][] planes,
st.planes, // final TilePlanes.PlaneData [][] planes,
true, // final boolean merge_low_eigen,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
}
*/
double [][] quality_stats1 = lp.selectNeighborPlanesMutual(
st.planes, // final TilePlanes.PlaneData [][] planes,
2, // final int debugLevel)
......@@ -3501,6 +3543,7 @@ public class TileProcessor {
clt_parameters.tileY);
st.resolveConflicts(
lp, // LinkPlanes lp,
clt_parameters.plMaxEigen,
clt_parameters.plConflDualTri, // boolean conflDualTri, // Resolve dual triangles conflict (odoodo)
clt_parameters.plConflMulti, // boolean conflMulti, // Resolve multiple odo triangles conflicts
......@@ -3622,7 +3665,8 @@ public class TileProcessor {
clt_parameters.tileY);
lp.filterNeighborPlanes(
st.planes, // final TilePlanes.PlaneData [][] planes,
st.planes, // final TilePlanes.PlaneData [][] planes,
true, // final boolean merge_low_eigen,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
......@@ -3634,6 +3678,7 @@ public class TileProcessor {
clt_parameters.tileY);
st.resolveConflicts(
lp, // LinkPlanes lp,
clt_parameters.plMaxEigen,
clt_parameters.plConflDualTri, // boolean conflDualTri, // Resolve dual triangles conflict (odoodo)
clt_parameters.plConflMulti, // boolean conflMulti, // Resolve multiple odo triangles conflicts
......@@ -3723,7 +3768,7 @@ public class TileProcessor {
true, //boolean use_NaN)
0.0,
10.0);
double [][] plane_data = new double [plane_data_nonan.length + plane_data_nan.length + 2][];
double [][] plane_data = new double [plane_data_nonan.length + plane_data_nan.length + 3][];
int indx = 0;
for (int i = 0; i < plane_data_nonan.length; i++){
plane_data[indx++] = plane_data_nonan[i];
......@@ -3741,6 +3786,23 @@ public class TileProcessor {
if (Double.isNaN(plane_data[indx][i])) plane_data[indx][i] = 0.0;
if (plane_data[indx-1][i] > 0) plane_data[indx][i] = Double.NaN;
}
indx++;
plane_data[indx] = new double [wh[0]*wh[1]];
for (int i = 0; i < plane_data[indx].length; i++){
int dbg_stx = (i % wh[0]) /superTileSize;
int dbg_sty = (i / wh[0]) /superTileSize;
int dbg_nsTile = dbg_stx + dbg_sty * (wh[0]/superTileSize);
if (st.planes_mod[dbg_nsTile] == null){
plane_data[indx][i] = Double.NaN;
}else {
int dbg_nl = 0;
for (int j = 0; j < st.planes_mod[dbg_nsTile].length; j++){
if (st.planes_mod[dbg_nsTile][j] != null) dbg_nl++;
}
plane_data[indx][i] = dbg_nl;
}
}
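// (Descriptive note: the extra layer filled above encodes, per supertile, the number of
// non-null planes in planes_mod - this is why plane_data was allocated with "+ 3" instead
// of the previous "+ 2".)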
sdfa_instance.showArrays(plane_data, wh[0], wh[1], true, "plane_data");
......