Commit 9b725244 authored by Andrey Filippov

before removing old versions

parent 9b8a5082
@@ -81,6 +81,18 @@ public class LinkPlanes {
public double plCutTail; // = 1.4; // When merging with neighbors cut the tail that is worse than scaled best
public double plMinTail; // = 0.015; // Set cutoff value level not less than
public int plMinPoints; // = 5; // Minimal number of points for plane detection
public double plTargetEigen; // = 0.02; // Remove outliers until main axis eigenvalue (possibly scaled by plDispNorm) gets below
public double plFractOutliers; // = 0.3; // Maximal fraction of outliers to remove
public int plMaxOutliers; // = 20; // Maximal number of outliers to remove
public double plPull = 5.0; // .3; // Relative weight of original (measured) plane compared to average neighbor pull
public int plIterations = 10; // Maximal number of smoothing iterations for each step
public boolean plStopBad = true; // Do not update supertile if any of connected neighbors is not good (false: just skip that neighbor)
public int plPrecision = 6; // Maximal step difference (1/power of 10)
public double plNormPow = .5; // 0.0: 8 neighbors pull 8 times as 1, 1.0 - same as 1
public int dbg_tileX;
public int dbg_tileY;
@@ -137,6 +149,17 @@ public class LinkPlanes {
plMaxDisp = clt_parameters.plMaxDisp;
plCutTail = clt_parameters.plCutTail;
plMinTail = clt_parameters.plMinTail;
plMinPoints = clt_parameters.plMinPoints;
plTargetEigen = clt_parameters.plTargetEigen;
plFractOutliers = clt_parameters.plFractOutliers;
plMaxOutliers = clt_parameters.plMaxOutliers;
plPull = clt_parameters.plPull;
plIterations = clt_parameters.plIterations;
plStopBad = clt_parameters.plStopBad;
plPrecision = clt_parameters.plPrecision;
plNormPow = clt_parameters.plNormPow;
dbg_tileX = clt_parameters.tileX;
dbg_tileY = clt_parameters.tileY;
@@ -244,7 +267,7 @@ public class LinkPlanes {
", at least one of them < plMaxDisp="+plMaxDisp);
return false;
} else {
-if (debugLevel > 0) System.out.println(prefix+" disparity ratio ("+disp1+":"+disp2+" is OK, <= plMaxZRatio="+plMaxZRatio);
+if (debugLevel > 1) System.out.println(prefix+" disparity ratio ("+disp1+":"+disp2+" is OK, <= plMaxZRatio="+plMaxZRatio);
}
if (!merge_weak && (plane1.getWeight() < plMinStrength)) {
@@ -724,7 +747,14 @@ public class LinkPlanes {
}
return qualities; // TODO: add modes to select what is output
}
/**
* Calculate what the thickness eigenvalues would be when merging this plane with each individual neighbor
* @param planes array of per supertile, per layer plane instances
* @param debugLevel
* @param dbg_X
* @param dbg_Y
*/
public void matchPlanes(
final TilePlanes.PlaneData [][] planes,
final int debugLevel,
@@ -744,7 +774,7 @@ public class LinkPlanes {
// final int debug_stile = 20 * stilesX + 27;
final int debug_stile = dbg_Y * stilesX + dbg_X;
-final Thread[] threads = ImageDtt.newThreadArray(st.tileProcessor.threadsMax);
+final Thread[] threads = ImageDtt.newThreadArray((debugLevel > 1)? 1 : st.tileProcessor.threadsMax);
final AtomicInteger ai = new AtomicInteger(0);
// Select best symmetrical match, consider only N, NE, E, SE - later opposite ones will be copied
for (int ithread = 0; ithread < threads.length; ithread++) {
@@ -754,9 +784,11 @@ public class LinkPlanes {
for (int nsTile0 = ai.getAndIncrement(); nsTile0 < nStiles; nsTile0 = ai.getAndIncrement()) {
int sty0 = nsTile0 / stilesX;
int stx0 = nsTile0 % stilesX;
-int dl = ((debugLevel > -1) && (nsTile0 == debug_stile)) ? 1:0;
+// int dl = ((debugLevel > 0) && (nsTile0 == debug_stile)) ? 1:0;
int dl = ((debugLevel > 1) && (nsTile0 == debug_stile)) ? 3: debugLevel;
if ( planes[nsTile0] != null) {
-if (dl > 0){
+if (dl > 1){
System.out.println("matchPlanes(): nsTile0 ="+nsTile0);
// dbg_planes = planes[nsTile0];
}
@@ -783,7 +815,7 @@ public class LinkPlanes {
if (other_planes[np] != null) {
TilePlanes.PlaneData other_plane = this_plane.getPlaneToThis(
other_planes[np],
-dl-1); // debugLevel);
+dl - 2); // debugLevel);
if (other_plane !=null) { // now always, but may add later
TilePlanes.PlaneData merged_pd = this_plane.mergePlaneToThis(
other_plane, // PlaneData otherPd,
@@ -792,14 +824,14 @@ public class LinkPlanes {
false, // boolean ignore_weights,
true, // boolean sum_weights,
plPreferDisparity,
-dl-1); // int debugLevel)
+dl-2); // int debugLevel)
if (merged_pd !=null) { // now always, but may add later
/// merged_pd.scaleWeight(0.5);
this_plane.setNeibMatch (dir, np, merged_pd.getValue()); // smallest eigenValue
this_plane.setNeibWMatch(dir, np, merged_pd.getWValue()); // smallest eigenValue
}
-if (dl > 0){
+if (dl > 1){
System.out.println("matchPlanes(): nsTile0 ="+nsTile0+":"+np0+"-("+dir+")->"+nsTile+":"+np+" (ignore_weights=true)");
}
merged_pd = this_plane.mergePlaneToThis(
@@ -809,14 +841,14 @@ public class LinkPlanes {
true, // false, // boolean ignore_weights,
true, // boolean sum_weights,
plPreferDisparity,
-dl-0); // int debugLevel)
+dl-1); // int debugLevel)
if (merged_pd !=null) { // now always, but may add later
/// merged_pd.scaleWeight(0.5);
this_plane.setNeibMatchEq (dir, np, merged_pd.getValue()); // smallest eigenValue
this_plane.setNeibWMatchEq(dir, np, merged_pd.getWValue()); // smallest eigenValue
}
-if (dl > 0){
+if (dl > 1){
System.out.println("matchPlanes(): nsTile0 ="+nsTile0+":"+np0+"-("+dir+")->"+nsTile+":"+np+"...DONE, merged_pd.getWValue()="+merged_pd.getWValue());
}
@@ -829,7 +861,7 @@ public class LinkPlanes {
}
}
}
-if (dl > 0){
+if (dl > 1){
System.out.println("matchPlanes(): nsTile0 ="+nsTile0+ " Done.");
}
@@ -849,7 +881,7 @@ public class LinkPlanes {
int sty0 = nsTile0 / stilesX;
int stx0 = nsTile0 % stilesX;
int dl = ((debugLevel > -1) && (nsTile0 == debug_stile)) ? 1:0;
-if (dl>0) {
+if (dl > 1) {
System.out.println("matchPlanes() nsTile0="+nsTile0);
}
if ( planes[nsTile0] != null) {
@@ -893,7 +925,7 @@ public class LinkPlanes {
}
}
}
-if (dl>0) {
+if (dl > 1) {
System.out.println("matchPlanes() nsTile0="+nsTile0);
}
@@ -935,15 +967,17 @@ public class LinkPlanes {
final int debug_stile = dbg_Y * stilesX + dbg_X;
final TileNeibs tnSurface = new TileNeibs(stilesX, stilesY);
-final Thread[] threads = ImageDtt.newThreadArray(st.tileProcessor.threadsMax);
+final Thread[] threads = ImageDtt.newThreadArray((debugLevel > 1)? 1 : st.tileProcessor.threadsMax);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nsTile0 = ai.getAndIncrement(); nsTile0 < nStiles; nsTile0 = ai.getAndIncrement()) {
-int dl = ((debugLevel > -1) && (nsTile0 == debug_stile)) ? 1:0;
+// int dl = ((debugLevel > -1) && (nsTile0 == debug_stile)) ? 1:0;
int dl = ((debugLevel > 1) && (nsTile0 == debug_stile)) ? 3: debugLevel;
if ( planes[nsTile0] != null) {
-if (dl > 0){
+if (dl > 1){
System.out.println("interPlaneCosts(): nsTile0 ="+nsTile0);
}
for (int np0 = 0; np0 < planes[nsTile0].length; np0++){ // nu
@@ -961,7 +995,7 @@ public class LinkPlanes {
if (other_planes[np] != null) {
TilePlanes.PlaneData other_plane = this_plane.getPlaneToThis(
other_planes[np],
-dl-1); // debugLevel);
+dl-2); // debugLevel);
if (other_plane !=null) { // now always, but may add later
double link_cost = getLinkCost(
en_sticks, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks"
@@ -972,7 +1006,7 @@ public class LinkPlanes {
Double.NaN, // double merged_wev, // if NaN will calculate assuming the same supertile - for world
Double.NaN, // double merged_wev_eq, // if NaN will calculate assuming the same supertile - for world
"interPlaneCosts(): "+nsTile0+":"+np0+"-("+dir+")-->"+nsTile+":"+np, // String prefix,
-dl - 1);
+dl - 2);
this_plane.setLinkCosts(dir, np, link_cost);
}
}
@@ -982,7 +1016,7 @@ public class LinkPlanes {
}
}
}
-if (dl > 0){
+if (dl > 1){
System.out.println("interPlaneCosts(): nsTile0 ="+nsTile0+ " Done.");
}
}
@@ -1007,6 +1041,14 @@ public class LinkPlanes {
dbg_Y);
}
*/
/**
* Set links between planes in 8 directions, in such a way that no start/end planes can be shared
* @param planes per supertile, per plane - array of supertile instances
* @param max_cost maximal composite connection cost allowed
* @param debugLevel
* @param dbg_X
* @param dbg_Y
*/
public void setExclusiveLinks(
final TilePlanes.PlaneData [][] planes,
final double max_cost,
@@ -1027,7 +1069,7 @@ public class LinkPlanes {
final int debug_stile = dbg_Y * stilesX + dbg_X;
final TileNeibs tnSurface = new TileNeibs(stilesX, stilesY);
-final Thread[] threads = ImageDtt.newThreadArray(st.tileProcessor.threadsMax);
+final Thread[] threads = ImageDtt.newThreadArray((debugLevel > 1)? 1 : st.tileProcessor.threadsMax);
final AtomicInteger ai = new AtomicInteger(0);
// Reset neighbors
for (int ithread = 0; ithread < threads.length; ithread++) {
@@ -1069,10 +1111,12 @@ public class LinkPlanes {
threads[ithread] = new Thread() {
public void run() {
for (int nsTile0 = ai.getAndIncrement(); nsTile0 < nStiles; nsTile0 = ai.getAndIncrement()) {
-int dl = ((debugLevel > -1) && (nsTile0 == debug_stile)) ? 1:0;
+// int dl = ((debugLevel > -1) && (nsTile0 == debug_stile)) ? 1:0;
int dl = ((debugLevel > 1) && (nsTile0 == debug_stile)) ? 3: debugLevel;
if ( planes[nsTile0] != null) {
int num_sp = planes[nsTile0].length;
-if (dl > 0){
+if (dl > 1){
System.out.println("setExclusiveLinks(): nsTile0 ="+nsTile0);
}
for (int dir = 0; dir < 4; dir++){//
@@ -1120,7 +1164,7 @@ public class LinkPlanes {
}
}
}
-if (dl > 0){
+if (dl > 1){
System.out.println("setExclusiveLinks(): nsTile0 ="+nsTile0+ " Done.");
}
}
@@ -1252,237 +1296,13 @@ public class LinkPlanes {
* merge quality falls below scaled quality of the best, pre-set minimum or the merged plane becomes
* too thick
* Separately calculates merged weighted plane and with equal weights of the neighbors
* TODO: Maybe just switch to the connection costs instead?
* @param planes array of plane instances for the same supertile
* @param debugLevel
* @param dbg_X
* @param dbg_Y
*/
public void setNonExclusive_0(
final TilePlanes.PlaneData [][] planes,
// final double center_weight,
final int debugLevel,
final int dbg_X,
final int dbg_Y)
{
final int tilesX = st.tileProcessor.getTilesX();
final int tilesY = st.tileProcessor.getTilesY();
final int superTileSize = st.tileProcessor.getSuperTileSize();
// final int tileSize = tileProcessor.getTileSize();
final int stilesX = (tilesX + superTileSize -1)/superTileSize;
final int stilesY = (tilesY + superTileSize -1)/superTileSize;
final int nStiles = stilesX * stilesY;
final double [] nan_plane = new double [superTileSize*superTileSize];
for (int i = 0; i < nan_plane.length; i++) nan_plane[i] = Double.NaN;
final int [][] dirsYX = {{-1, 0},{-1,1},{0,1},{1,1},{1,0},{1,-1},{0,-1},{-1,-1}};
// final int debug_stile = 20 * stilesX + 27;
// final int debug_stile = 17 * stilesX + 27;
// final int debug_stile = 9 * stilesX + 26;
final int debug_stile = dbg_Y * stilesX + dbg_X;
final Thread[] threads = ImageDtt.newThreadArray((debugLevel > 1)? 1 : st.tileProcessor.threadsMax);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
// TilePlanes.PlaneData [][] dbg_planes = planes;
for (int nsTile0 = ai.getAndIncrement(); nsTile0 < nStiles; nsTile0 = ai.getAndIncrement()) {
int sty0 = nsTile0 / stilesX;
int stx0 = nsTile0 % stilesX;
int dl = (nsTile0 == debug_stile) ? debugLevel:0;
if ( planes[nsTile0] != null) {
if (dl > 0){
System.out.println("setNonExclusive() nsTile0="+nsTile0);
}
for (int np0 = 0; np0 < planes[nsTile0].length; np0++) if (planes[nsTile0][np0] != null) {
TilePlanes.PlaneData merged_pd = planes[nsTile0][np0];
ArrayList<Point> neib_list = new ArrayList<Point>();
final double [][] merged_ev = planes[nsTile0][np0].getMergedValue();
for (int dir = 0; dir < 8; dir++) if (planes[nsTile0][np0].hasMergedValid(dir)){ //
int stx = stx0 + dirsYX[dir][1];
int sty = sty0 + dirsYX[dir][0];
int nsTile = sty * stilesX + stx; // from where to get
// find best individual connection among valid ones
boolean [] merged_valid = planes[nsTile0][np0].getMergedValid(dir);
// double [] merged_ev = ignore_weights? (planes[nsTile0][np0].getMergedValueEq(dir)):(planes[nsTile0][np0].getMergedValue(dir));
// double [] merged_ev =planes[nsTile0][np0].getMergedValue(dir);
int best_np = -1;
for (int np = 0; np < merged_valid.length; np++){
if (merged_valid[np] && ((best_np < 0) || (merged_ev[dir][np] < merged_ev[dir][best_np]))){
best_np = np;
}
}
if (best_np >=0) {
neib_list.add(new Point(dir, best_np));
}
}
Collections.sort(neib_list, new Comparator<Point>() {
@Override
public int compare(Point lhs, Point rhs) {
// -1 - less than, 1 - greater than, 0 - equal, all inverted for descending
return (merged_ev[lhs.x][lhs.y] < merged_ev[rhs.x][rhs.y]) ? -1 : (merged_ev[lhs.x][lhs.y] > merged_ev[rhs.x][rhs.y]) ? 1 : 0;
}
});
int [] nb = {-1,-1,-1,-1,-1,-1,-1,-1};
if (!neib_list.isEmpty()) {
double cut_value = merged_ev[neib_list.get(0).x][neib_list.get(0).y]*plCutTail;
if (cut_value < plMinTail) cut_value = plMinTail;
for (Point p: neib_list){
int dir = p.x;
int np = p.y;
if (merged_ev[dir][np] <= cut_value ){
int stx = stx0 + dirsYX[dir][1];
int sty = sty0 + dirsYX[dir][0];
int nsTile = sty * stilesX + stx; // from where to get
nb[dir] = np;
TilePlanes.PlaneData other_plane = planes[nsTile0][np0].getPlaneToThis(
planes[nsTile][np],
dl - 3); // debugLevel);
if (other_plane != null){
TilePlanes.PlaneData merged_pd_back = merged_pd.clone();
merged_pd = merged_pd.mergePlaneToThis(
other_plane, // PlaneData otherPd,
1.0, // double scale_other,
1.0, // double starWeightPwr, // Use this power of tile weight when calculating connection cost
false, // boolean ignore_weights,
true, // boolean sum_weights,
plPreferDisparity,
dl - 3); // int debugLevel)
if (merged_pd.getValue() > plMaxEigen){
nb[dir] = -1;
if (dl > -1){
String s = "[";
for (int i = 0; i < 8; i++){
s += (nb[i]>=0) ? nb[i]:"x";
if (i < 7) s += ", ";
}
s+="]";
System.out.println("setNonExclusive() nsTile0="+nsTile0+":"+np0+
" composite weighted plane value "+merged_pd.getValue()+
" exceeded plMaxEigen="+plMaxEigen+
". Removing last contributor: dir="+dir+", np="+np+
", remaining: "+s);
}
merged_pd = merged_pd_back.clone();
break;
}
}
}
}
merged_pd.getWorldXYZ(0); // debugLevel); // just to recalculate world data for debugging
merged_pd.setNeibBest(nb);
planes[nsTile0][np0].setNonexclusiveStar(merged_pd);
if (dl > 0){
String neib_str = "";
for (int dir = 0; dir < 8; dir++){
neib_str += (nb[dir]>=0)?nb[dir]:"x";
if (dir < 7) neib_str += ", ";
}
System.out.println("setNonExclusive() nsTile0="+nsTile0+":"+np0+
" weighted neighbors ["+neib_str+"], cutoff value = "+cut_value+
" merged value = "+merged_pd.getValue());
}
}
final double [][] merged_ev_eq = planes[nsTile0][np0].getMergedValueEq();
merged_pd = planes[nsTile0][np0];
neib_list = new ArrayList<Point>();
for (int dir = 0; dir < 8; dir++) if (planes[nsTile0][np0].hasMergedValid(dir)){ //
int stx = stx0 + dirsYX[dir][1];
int sty = sty0 + dirsYX[dir][0];
int nsTile = sty * stilesX + stx; // from where to get
// find best individual connection among valid ones
boolean [] merged_valid = planes[nsTile0][np0].getMergedValid(dir);
// double [] merged_ev = ignore_weights? (planes[nsTile0][np0].getMergedValueEq(dir)):(planes[nsTile0][np0].getMergedValue(dir));
int best_np = -1;
for (int np = 0; np < merged_valid.length; np++){
if (merged_valid[np] && ((best_np < 0) || (merged_ev_eq[dir][np] < merged_ev_eq[dir][best_np]))){
best_np = np;
}
}
if (best_np >=0) {
neib_list.add(new Point(dir, best_np));
}
}
Collections.sort(neib_list, new Comparator<Point>() {
@Override
public int compare(Point lhs, Point rhs) {
// -1 - less than, 1 - greater than, 0 - equal, all inverted for descending
return (merged_ev_eq[lhs.x][lhs.y] < merged_ev_eq[rhs.x][rhs.y]) ? -1 : (merged_ev_eq[lhs.x][lhs.y] > merged_ev_eq[rhs.x][rhs.y]) ? 1 : 0;
}
});
int [] nb_eq = {-1,-1,-1,-1,-1,-1,-1,-1};
if (!neib_list.isEmpty()) {
double cut_value = merged_ev_eq[neib_list.get(0).x][neib_list.get(0).y]*plCutTail;
if (cut_value < plMinTail) cut_value = plMinTail;
for (Point p: neib_list){
int dir = p.x;
int np = p.y;
if (merged_ev_eq[dir][np] <= cut_value ){
int stx = stx0 + dirsYX[dir][1];
int sty = sty0 + dirsYX[dir][0];
int nsTile = sty * stilesX + stx; // from where to get
nb_eq[dir] = np;
TilePlanes.PlaneData other_plane = planes[nsTile0][np0].getPlaneToThis(
planes[nsTile][np],
dl - 3); // debugLevel);
TilePlanes.PlaneData merged_pd_back = merged_pd.clone();
if (other_plane != null){
merged_pd = merged_pd.mergePlaneToThis(
other_plane, // PlaneData otherPd,
1.0, // double scale_other,
1.0, // double starWeightPwr, // Use this power of tile weight when calculating connection cost
true, // boolean ignore_weights,
true, // boolean sum_weights,
plPreferDisparity,
dl - 3); // int debugLevel)
if (merged_pd.getValue() > plMaxEigen){
nb_eq[dir] = -1;
if (dl > -1){
String s = "[";
for (int i = 0; i < 8; i++){
s += (nb_eq[i]>=0) ? nb_eq[i]:"x";
if (i < 7) s += ", ";
}
s+="]";
System.out.println("setNonExclusive() nsTile0="+nsTile0+":"+np0+
" composite equalized plane value "+merged_pd.getValue()+
" exceeded plMaxEigen="+plMaxEigen+
". Removing last contributor: dir="+dir+", np="+np+
", remaining: "+s);
}
merged_pd = merged_pd_back.clone();
break;
}
}
}
}
merged_pd.getWorldXYZ(0); // debugLevel); // just to recalculate world data for debugging
merged_pd.setNeibBest(nb_eq);
planes[nsTile0][np0].setNonexclusiveStarEq(merged_pd);
if (dl > 0){
String neib_str = "";
for (int dir = 0; dir < 8; dir++){
neib_str += (nb_eq[dir]>=0)?nb_eq[dir]:"x";
if (dir < 7) neib_str += ", ";
}
System.out.println("setNonExclusive() nsTile0="+nsTile0+":"+np0+
" equalized neighbors ["+neib_str+"], cutoff value = "+cut_value+
" merged value = "+merged_pd.getValue());
}
}
}
}
}
}
};
}
ImageDtt.startAndJoin(threads);
}
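// Illustrative sketch (not from the original source): the neighbor selection rule described in the
// javadoc above - candidate links are sorted by their merged eigenvalue, and only those within plCutTail
// of the best one (never cutting below plMinTail) are kept. Names here (selectByCutTail, candidates,
// merged_ev) are hypothetical; the full logic, which additionally drops a contributor if the combined
// plane gets thicker than plMaxEigen, can be seen in setNonExclusive_0() above.
static java.util.List<java.awt.Point> selectByCutTail(
		java.util.List<java.awt.Point> candidates, // x = direction (0..7), y = neighbor plane index
		double [][] merged_ev,                     // [direction][plane index] merged smallest eigenvalue
		double plCutTail,                          // scale of the best value that is still acceptable
		double plMinTail)                          // never set the cutoff below this level
{
	java.util.List<java.awt.Point> kept = new java.util.ArrayList<java.awt.Point>();
	if (candidates.isEmpty()) return kept;
	java.util.Collections.sort(candidates, (lhs, rhs) ->
			Double.compare(merged_ev[lhs.x][lhs.y], merged_ev[rhs.x][rhs.y])); // best (thinnest) first
	double cut_value = merged_ev[candidates.get(0).x][candidates.get(0).y] * plCutTail;
	if (cut_value < plMinTail) cut_value = plMinTail;
	for (java.awt.Point p : candidates) {
		if (merged_ev[p.x][p.y] <= cut_value) kept.add(p); // cut the tail that is worse than scaled best
	}
	return kept;
}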
public void setNonExclusive(
final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks"
final TilePlanes.PlaneData [][] planes,
@@ -1961,6 +1781,15 @@ public class LinkPlanes {
}
/**
* Select initial pairs of plane merge candidates (same supertile plane). All pairs are ordered, first is lower than second
* Candidates for merging share some non-exclusive neighbors
* @param planes per supertile, per plane - array of supertile instances
* @param debugLevel debug level
* @param dbg_X tile x-index for detailed debug data
* @param dbg_Y tile y-index for detailed debug data
* @return array of per-supertile, per pair {start plane, end_plane} pairs
*/
public int [][][] getMergeSameTileCandidates(
final TilePlanes.PlaneData [][] planes,
final int debugLevel,
@@ -2171,12 +2000,29 @@ public class LinkPlanes {
return filtered_merge_candidates;
}
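// Illustrative sketch (not from the original source): how the two structures documented around here fit
// together - merge_candidates[nsTile][pair] = {np1, np2} lists ordered candidate pairs per supertile, and
// valid_candidates[nsTile][np1][np2] (filled by overlapSameTileCandidates() below) marks which pairs are
// still allowed to merge. The helper name dumpMergeCandidates is hypothetical.
static void dumpMergeCandidates(
		int [][][] merge_candidates,
		boolean [][][] valid_candidates)
{
	for (int nsTile = 0; nsTile < merge_candidates.length; nsTile++) {
		if (merge_candidates[nsTile] == null) continue;
		for (int [] pair : merge_candidates[nsTile]) {
			int np1 = pair[0]; // lower plane index of the ordered pair
			int np2 = pair[1]; // higher plane index
			boolean allowed = (valid_candidates == null) ||
					(valid_candidates[nsTile] == null) ||
					valid_candidates[nsTile][np1][np2]; // same null-tolerant test as used below
			System.out.println("nsTile=" + nsTile + " pair " + np1 + ":" + np2 + (allowed ? " (allowed)" : " (vetoed)"));
		}
	}
}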
/**
* Verify merge candidates by evaluating overlaps. Close planes that have significant overlap on the image are more likely
* to be actually different planes, if they do not overlap it is likely to be the same one, just multiple separate parts of
* it mistakenly discriminated during initial plane generation from the tiles data.
*
* Merging is also permitted if the planes are "weak and similar" - low strength and fit for merging
*
* Added a 'hack' - allow merging even overlapping tiles if they are close enough in the real world space
*
* @param planes per supertile, per plane - array of supertile instances
* @param merge_candidates array of merge pairs (per-supertile, per pair {start plane, end_plane} pairs)
* @param min_distance minimal distance (in meters) to keep merging overlapping pair
* @param debugLevel debug level
* @param dbg_X tile x-index for detailed debug data
* @param dbg_Y tile y-index for detailed debug data
* @return per super tile , per first plane index, per second plane index boolean array of permitted to merge
*/
public boolean [][][] overlapSameTileCandidates(
final TilePlanes.PlaneData [][] planes,
final int [][][] merge_candidates,
// may be a hack - has problems with not merging some close portions of the (horizontal) pavement -
// maybe just some dirty window glitches
-// ignore overlap if the planes are close in real world. Only comparing distances from good plates
+// ignore overlap if the planes are close in the real world. Only comparing distances from good plates
final double min_distance,
final int debugLevel,
final int dbg_X,
@@ -2502,14 +2348,18 @@ public class LinkPlanes {
/**
-* Possible problem is that "normalizing" merge quality for low weights is not applicable for "star" plane that include neighhbors
+* Possible problem is that "normalizing" merge quality for low weights is not applicable for "star" plane that include neighbors
-* Switch to a single "cost" function (costSameTileConnectionsAlt())
+* Also calculates and outputs some of other costs - just for debugging
-* @param planes
+* TODO: Switch to a single "cost" function (costSameTileConnectionsAlt())
-* @param merge_candidates
+* @param planes per supertile, per plane - array of supertile instances
-* @param valid_candidates
+* @param merge_candidates array of merge pairs (per-supertile, per pair {start plane, end_plane} pairs)
-* @param debugLevel
+* @param valid_candidates per super tile , per first plane index, per second plane index boolean array of permitted to merge.
-* @param dbg_X
+* valid_candidates array is updated as a result of this method
-* @param dbg_Y
+* @param relax - multiply thresholds by this value, so when relax > 1.0, the fitting requirements are loosened (used for
* conflicting planes)
* @param debugLevel debug level
* @param dbg_X tile x-index for detailed debug data
* @param dbg_Y tile y-index for detailed debug data
*/
public void costSameTileConnections(
final TilePlanes.PlaneData [][] planes,
@@ -2535,7 +2385,9 @@ public class LinkPlanes {
threads[ithread] = new Thread() {
public void run() {
for (int nsTile0 = ai.getAndIncrement(); nsTile0 < nStiles; nsTile0 = ai.getAndIncrement()) if ( merge_candidates[nsTile0] != null) {
-int dl = ((debugLevel > 0) && (nsTile0 == debug_stile)) ? 3: ((debugLevel > 1) ? 2:0);
+// int dl = ((debugLevel > 0) && (nsTile0 == debug_stile)) ? 3: ((debugLevel > 1) ? 2:0);
int dl = ((debugLevel > 1) && (nsTile0 == debug_stile)) ? 3: debugLevel;
if (dl > 2){
System.out.println("costSameTileConnections(): nsTile="+nsTile0);
}
@@ -2557,7 +2409,7 @@ public class LinkPlanes {
Double.NaN, // double merged_wev, // if NaN will calculate assuming the same supertile - for world
Double.NaN, // double merged_wev_eq, // if NaN will calculate assuming the same supertile - for world
prefix, // String prefix,
-dl -1); // int debugLevel)
+dl - 2); // int debugLevel)
prefix = "costSameTileConnections() fit equal weight: nsTile0="+nsTile0+" np1="+np1+" np2="+np2;
boolean fit2 = planesFit(
planes[nsTile0][np1].getNonexclusiveStarEqFb(), // TilePlanes.PlaneData plane1, // should belong to the same supertile (or be converted for one)
@@ -2569,24 +2421,24 @@ public class LinkPlanes {
Double.NaN, // double merged_wev, // if NaN will calculate assuming the same supertile - for world
Double.NaN, // double merged_wev_eq, // if NaN will calculate assuming the same supertile - for world
prefix, // String prefix,
-dl -1); // int debugLevel)
+dl - 2); // int debugLevel)
// if (!fit1 || !fit2){
if (!fit1 && !fit2){
valid_candidates[nsTile0][np1][np2] = false;
valid_candidates[nsTile0][np2][np1] = false;
-if (dl > -1){
+if (dl > 0){
System.out.println("costSameTileConnections(): nsTile="+nsTile0+":"+np1+":"+np2+
" REMOVING PAIR, fit1="+fit1+" fit2="+fit2);
}
} else {
-if (dl > -1){
+if (dl > 0){
System.out.println("costSameTileConnections(): nsTile="+nsTile0+":"+np1+":"+np2+
" KEEPING PAIR, fit1="+fit1+" fit2="+fit2);
}
}
-if (dl>0){
+if (dl > 1){
double [][] costs = new double[6][];
costs[0] = getFitQualities(
true, // en_sticks, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks"
@@ -2766,7 +2618,21 @@ public class LinkPlanes {
ImageDtt.startAndJoin(threads);
return filtered_pairs;
}
/**
* Calculate costs of merging planes of the same supertile and remove those that exceed the threshold
* Also calculates and outputs some of other costs - just for debugging
* @param threshold cost threshold for planes that do have valid non-exclusive neighbors that help to determine
* plane orientation
* @param threshold_nostar cost threshold for the planes that do not have valid neighbors
* @param planes per supertile, per plane - array of supertile instances
* @param merge_candidates array of merge pairs (per-supertile, per pair {start plane, end_plane} pairs)
* @param valid_candidates per super tile , per first plane index, per second plane index boolean array of permitted to merge.
* valid_candidates array is updated as a result of this method
* @param debugLevel debug level
* @param dbg_X tile x-index for detailed debug data
* @param dbg_Y tile y-index for detailed debug data
*/
public void costSameTileConnectionsAlt(
final double threshold,
final double threshold_nostar,
@@ -2792,7 +2658,9 @@ public class LinkPlanes {
threads[ithread] = new Thread() {
public void run() {
for (int nsTile0 = ai.getAndIncrement(); nsTile0 < nStiles; nsTile0 = ai.getAndIncrement()) if ( merge_candidates[nsTile0] != null) {
-int dl = ((debugLevel > 0) && (nsTile0 == debug_stile)) ? 3: ((debugLevel > 1) ? 2:0);
+// int dl = ((debugLevel > 0) && (nsTile0 == debug_stile)) ? 3: ((debugLevel > 1) ? 2:0);
int dl = ((debugLevel > 1) && (nsTile0 == debug_stile)) ? 3: debugLevel;
if (dl > 2){
System.out.println("costSameTileConnectionsAlt(): nsTile="+nsTile0);
}
@@ -2817,7 +2685,7 @@ public class LinkPlanes {
Double.NaN, // double merged_wev, // if NaN will calculate assuming the same supertile - for world
Double.NaN, // double merged_wev_eq, // if NaN will calculate assuming the same supertile - for world
prefix, // String prefix,
-dl); // int debugLevel)
+dl - 1); // int debugLevel)
prefix = "costSameTileConnectionsAlt() fit equal weight: nsTile0="+nsTile0+" np1="+np1+" np2="+np2;
costs[1] = getLinkCost(
true, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks"
@@ -2828,7 +2696,7 @@ public class LinkPlanes {
Double.NaN, // double merged_wev, // if NaN will calculate assuming the same supertile - for world
Double.NaN, // double merged_wev_eq, // if NaN will calculate assuming the same supertile - for world
prefix, // String prefix,
-dl); // int debugLevel)
+dl - 1); // int debugLevel)
boolean star1 = (planes[nsTile0][np1].getNonexclusiveStar() != null) && (planes[nsTile0][np2].getNonexclusiveStar() != null);
boolean star2 = (planes[nsTile0][np1].getNonexclusiveStarEq() != null) && (planes[nsTile0][np2].getNonexclusiveStarEq() != null);
boolean fit1 = costs[0] < (star1 ? threshold : threshold_nostar);
@@ -2837,17 +2705,17 @@ public class LinkPlanes {
if (!fit1 && !fit2){
valid_candidates[nsTile0][np1][np2] = false;
valid_candidates[nsTile0][np2][np1] = false;
-if (dl > -1){
+if (dl > 0){
System.out.println("costSameTileConnectionsAlt(): nsTile="+nsTile0+":"+np1+":"+np2+
" REMOVING PAIR, fit1="+fit1+" fit2="+fit2+ " (star1="+star1+", star2="+star2+")");
}
} else {
-if (dl > -1){
+if (dl > 0){
System.out.println("costSameTileConnectionsAlt(): nsTile="+nsTile0+":"+np1+":"+np2+
" KEEPING PAIR, fit1="+fit1+" fit2="+fit2+ " (star1="+star1+", star2="+star2+")");
}
}
-if (dl>0){
+if (dl > 1){
double [][] costs0 = new double[6][];
costs0[0] = getFitQualities(
true, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks"
@@ -3246,11 +3114,27 @@ public class LinkPlanes {
ImageDtt.startAndJoin(threads);
return merge_pairs;
}
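// Illustrative sketch (not from the original source): the acceptance rule applied above in
// costSameTileConnectionsAlt() - a candidate pair is kept when its link cost is below "threshold" if both
// planes have a non-exclusive "star" neighborhood, or below the looser "threshold_nostar" otherwise.
// The helper name pairFits is hypothetical.
static boolean pairFits(double link_cost, boolean both_have_star, double threshold, double threshold_nostar)
{
	return link_cost < (both_have_star ? threshold : threshold_nostar);
}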
/**
* Extract non-conflicting groups of the same supertile planes that are OK to merge. For multiple intersecting pairs
* this method starts from the lowest-cost merge and adds planes (from merge_candidates) as long as they do not cause
* conflicts with the already selected ones
* @param merge_candidates array of merge pairs (per-supertile, per pair {start plane, end_plane} pairs)
* @param valid_candidates per super tile , per first plane index, per second plane index boolean array of permitted to merge.
* valid_candidates array is updated as a result of this method
* @param relax - multiply thresholds by this value, so when relax > 1.0, the fitting requirements are loosened (used for
* conflicting planes)
* @param debugLevel debug level
* @param dbg_X tile x-index for detailed debug data
* @param dbg_Y tile y-index for detailed debug data
* @return array of per supertile group sets for merging. Each group set is an array of groups. Each group is an array
* of plane indices.
*/
// TODO: get rid of quality_index use getLinkCost()
public int [][][] extractMergeSameTileGroups(
final TilePlanes.PlaneData [][] planes,
final int [][][] merge_candidates,
-final boolean [][][] plane_nooverlaps,
+final boolean [][][] valid_candidates, // will be updated
// final boolean [][][] plane_nooverlaps,
final double relax,
final int debugLevel,
@@ -3275,8 +3159,13 @@ public class LinkPlanes {
threads[ithread] = new Thread() {
public void run() {
for (int nsTile = ai.getAndIncrement(); nsTile < nStiles; nsTile = ai.getAndIncrement()) if ( merge_candidates[nsTile] != null) {
if (planes[nsTile] == null){
System.out.println("extractMergeSameTileGroups() planes["+nsTile+"] = null");
continue;
}
boolean [][] merge_pairs = new boolean [planes[nsTile].length][planes[nsTile].length];
-int dl = ((debugLevel > 0) && (nsTile == debug_stile)) ? 2: ((debugLevel > 1) ? 1:0);
+// int dl = ((debugLevel > 1) && (nsTile == debug_stile)) ? 2: ((debugLevel > 1) ? 1:0);
int dl = ((debugLevel > 1) && (nsTile == debug_stile)) ? 3: debugLevel;
if (dl > 1){
System.out.println("extractMergeSameTileGroups(): nsTile="+nsTile);
}
@@ -3284,7 +3173,7 @@ public class LinkPlanes {
for (int pair = 0; pair < merge_candidates[nsTile].length; pair ++){
int np1 = merge_candidates[nsTile][pair][0];
int np2 = merge_candidates[nsTile][pair][1];
-if ((plane_nooverlaps == null) || (plane_nooverlaps[nsTile] == null) || plane_nooverlaps[nsTile][np1][np2]) {
+if ((valid_candidates == null) || (valid_candidates[nsTile] == null) || valid_candidates[nsTile][np1][np2]) {
yet_to_merge.add(np1);
yet_to_merge.add(np2);
String prefix = "extractMergeSameTileGroups() pair="+pair+" nsTile="+nsTile+" np1="+np1+" np2="+np2;
@@ -3298,7 +3187,7 @@ public class LinkPlanes {
Double.NaN, // double merged_wev, // if NaN will calculate assuming the same supertile - for world
Double.NaN, // double merged_wev_eq, // if NaN will calculate assuming the same supertile - for world
prefix, // String prefix,
-dl) // int debugLevel)
+dl-1) // int debugLevel)
){
merge_pairs[np1][np2] = true;
merge_pairs[np2][np1] = true;
@@ -3379,7 +3268,7 @@ public class LinkPlanes {
Double.NaN, // double merged_wev, // if NaN will calculate assuming the same supertile - for world
Double.NaN, // double merged_wev_eq, // if NaN will calculate assuming the same supertile - for world
prefix, // String prefix,
-dl); // int debugLevel)
+dl-1); // int debugLevel)
if (qualities != null) {
double this_rq = qualities[quality_index];
if ((best_pair == null) || (best_quality > this_rq)){
@@ -3416,9 +3305,9 @@ public class LinkPlanes {
// make sure it does not overlap with any of existing
boolean nooverlap = true;
for (Integer np1: sub_group){
-if ((plane_nooverlaps != null) &&
+if ((valid_candidates != null) &&
-(plane_nooverlaps[nsTile] != null) &&
+(valid_candidates[nsTile] != null) &&
-!plane_nooverlaps[nsTile][np][np1]) {
+!valid_candidates[nsTile][np][np1]) {
nooverlap = false;
break;
}
@@ -3434,7 +3323,7 @@ public class LinkPlanes {
Double.NaN, // double merged_wev, // if NaN will calculate assuming the same supertile - for world
Double.NaN, // double merged_wev_eq, // if NaN will calculate assuming the same supertile - for world
prefix, // String prefix,
-dl); // int debugLevel)
+dl-1); // int debugLevel)
if (qualities != null) {
double this_rq = qualities[quality_index];
if ((bestMatch == null) || (best_quality > this_rq)){
@@ -3567,7 +3456,7 @@ public class LinkPlanes {
double rquality = (L - Lav)*(w1+w2)*(w1+w2) /(Lav*w1*w2);
if (rquality < minVal){
-System.out.println("BUG: mergeRQuality("+L1_in+", "+L2_in+", "+L_in+", "+w1+", "+w2+", "+eigen_floor+") -> "+rquality);
+// System.out.println("Precision limitation for the same tile merging: mergeRQuality("+L1_in+", "+L2_in+", "+L_in+", "+w1+", "+w2+", "+eigen_floor+") -> "+rquality);
rquality=minVal;
}
return rquality;
@@ -3689,5 +3578,800 @@ public class LinkPlanes {
}
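// Illustrative sketch (not from the original source): the relative merge quality computed by
// mergeRQuality() above, rquality = (L - Lav) * (w1 + w2)^2 / (Lav * w1 * w2). L is the smallest eigenvalue
// ("thickness") of the merged pair and Lav an averaged eigenvalue of the two inputs (its exact computation
// is outside this diff), so rquality grows when merging makes the plane thicker than its parts; the weight
// factor is smallest for planes of comparable weight and grows when one plane is much weaker than the
// other. The numbers below are made up.
static void rqualityExample()
{
	double L   = 0.030; // eigenvalue ("thickness") of the merged plane
	double Lav = 0.020; // averaged eigenvalue of the two planes before merging
	double w1  = 2.0;   // weight of the first plane
	double w2  = 0.5;   // weight of the second plane
	double rquality = (L - Lav) * (w1 + w2) * (w1 + w2) / (Lav * w1 * w2);
	System.out.println("rquality = " + rquality); // 0.01 * 6.25 / 0.02 = 3.125
}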
// result is needed for debug only
public TilePlanes.PlaneData [][][] conditionSuperTiles(
final TilePlanes.PlaneData [][] planes,
final int max_num_merge_try,
final int debugLevel)
{
// try to merge multiple times
TilePlanes.PlaneData [][][] dbg_orig_planes = null;
if (debugLevel > 0) {
dbg_orig_planes = new TilePlanes.PlaneData [max_num_merge_try][][];
}
for (int num_merge_try = 0; num_merge_try < max_num_merge_try; num_merge_try++){
// Calculate what the thickness eigenvalues would be when merging this plane with each individual neighbor
matchPlanes(
planes, // final TilePlanes.PlaneData [][] planes,
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
// Create matrix of connection costs between layer pairs for each direction. Even as currently costs seem to be symmetrical
// this method calculates all 8 directions, from the PoV of the current supertile.
interPlaneCosts( //
true, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks"
planes, // final TilePlanes.PlaneData [][] planes,
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
// Mark which links between neighbor planes are valid. Uses planesFit method, not costs
// results used by resolveStarConflict(), getMergeSameTileCandidates(), setNonExclusive()
// TODO: remove and switch to use connection costs
// Sets both setMergedValid and setMergedStrongValid - should they be removed later
filterNeighborPlanes(
planes, // final TilePlanes.PlaneData [][] planes,
true, // final boolean merge_low_eigen,
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
// Set links between planes in 8 directions, in such a way that no start/end planes can be shared
setExclusiveLinks(
planes, // final TilePlanes.PlaneData [][] planes,
getExNeibCost(), // final double max_cost,
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
// Merge the supertile planes with agreeing neighbors, non-exclusively (not considering other planes
// of the same supertile). Start with the best fit, then go to lower quality, until the individual
// merge quality falls below scaled quality of the best, pre-set minimum or the merged plane becomes
// too thick.
// Separately calculates merged weighted plane and with equal weights of the neighbors
// TODO: Maybe just switch to the connection costs instead?
setNonExclusive(
true, // final boolean en_sticks, // allow merging with bad plates
planes, // final TilePlanes.PlaneData [][] planes,
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
// Select initial pairs of plane merge candidates (same supertile plane). All pairs are ordered, first is lower than second
// Candidates for merging share some non-exclusive neighbors
int [][][] merge_candidates = getMergeSameTileCandidates(
planes, // final TilePlanes.PlaneData [][] planes,
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
// Verify merge candidates by evaluating overlaps. Close planes that have significant overlap on the image are more likely
// to be actually different planes, if they do not overlap it is likely to be the same one, just multiple separate parts of
// it mistakenly discriminated during initial plane generation from the tiles data.
//
// Merging is also permitted if the planes are "weak and similar" - low strength and fit for merging
//
// Added a 'hack' - allow merging even overlapping tiles if they are close enough in the real world space
boolean [][][] valid_candidates = overlapSameTileCandidates (
planes, // final TilePlanes.PlaneData [][] planes,
merge_candidates, // final int [][][] merge_candidates,
// TODO: use parameters
0.2, // final double min_distance,
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
// * Possible problem is that "normalizing" merge quality for low weights is not applicable for "star" plane that includes neighbors
// * Switch to a single "cost" function (costSameTileConnectionsAlt())
// Still - how to merge stray tiles that do not have neighbors/star? Still merge them "old way" (costSameTileConnections()) if at least 1 does not
// have a "star"
// Possible problem is that "normalizing" merge quality for low weights is not applicable for "star" plane that include neighbors
// Also calculates and outputs some of the other costs - just for debugging
// TODO: Switch to a single "cost" function (costSameTileConnectionsAlt())
costSameTileConnections(
planes, // final TilePlanes.PlaneData [][] planes,
merge_candidates, // final int [][][] merge_candidates,
valid_candidates, // final boolean [][][] valid_candidates, // will be updated
1.0, // double relax,
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
// Calculate costs of merging planes of the same supertile and remove those that exceed the threshold
// Also calculates and outputs some of other costs - just for debugging
costSameTileConnectionsAlt(
//5.0, // final double threshold,
//10.0, // final double threshold_nostar,
getMergeCostStar(), // relax_for_conflicts * 5.0, // final double threshold, //
getMergeCostNoStar(), //relax_for_conflicts * 10.0, // final double threshold_nostar,
planes, // final TilePlanes.PlaneData [][] planes,
merge_candidates, // final int [][][] merge_candidates,
valid_candidates, // final boolean [][][] valid_candidates, // will be updated
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
// Extract non-conflicting groups of the same supertile planes that are OK to merge. For multiple intersecting pairs
// this method starts from the lowest-cost merge and adds planes (from merge_candidates) as long as they do not cause
// conflicts with already selected ones
int [][][] merge_groups = extractMergeSameTileGroups(
planes, // final TilePlanes.PlaneData [][] planes,
merge_candidates, // final int [][][] merge_candidates,
valid_candidates, // boolean [][][] plane_overlaps,
1.0, // double relax,
debugLevel + 1, // final int debugLevel)
dbg_tileX,
dbg_tileY);
// Save current planes for debugging, before merging
if (dbg_orig_planes!=null) {
dbg_orig_planes[num_merge_try] = planes.clone();
for (int nsTile=0; nsTile < planes.length; nsTile++) if (planes[nsTile] != null){
dbg_orig_planes[num_merge_try][nsTile] = planes[nsTile].clone();
for (int np = 0; np < planes[nsTile].length; np++ ) if (planes[nsTile][np] != null){
dbg_orig_planes[num_merge_try][nsTile][np] = planes[nsTile][np].clone();
}
}
}
// Actually merge the planes by merging corresponding planes tile selections and re-building planes
// Apply same supertile planes merge by combining tiles and re-generating ellipsoids by diagonalizing
// covariance matrices. Some outliers may be removed after merge
int num_removed_by_merging = st.applyMergePlanes(
planes, // final TilePlanes.PlaneData[][] planes,
merge_groups, // final int [][][] merge_groups,
// parameters to generate ellipsoids
0.0, // 3, // final double disp_far, // minimal disparity to select (or NaN)
Double.NaN, // final double disp_near, // maximal disparity to select (or NaN)
plDispNorm, // final double dispNorm, // Normalize disparities to the average if above
0.0, // final double min_weight,
plMinPoints, // final int min_tiles,
// parameters to reduce outliers
plTargetEigen, // final double targetEigen, // = 0.1; // Remove outliers until main axis eigenvalue (possibly scaled by plDispNorm) gets below
plFractOutliers, // final double fractOutliers, // = 0.3; // Maximal fraction of outliers to remove
plMaxOutliers, // final int maxOutliers, // = 20; // Maximal number of outliers to remove
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
if ( debugLevel > -1) {
System.out.println("conditionSuperTiles(): try "+num_merge_try+ ": removed "+num_removed_by_merging+((num_removed_by_merging>0)?" planes by merging, recalculating connections":""));
}
if (num_removed_by_merging == 0){ // re-calculate all links
break;
}
}
return dbg_orig_planes;
}
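// Illustrative sketch (not from the original source): a hypothetical caller of conditionSuperTiles() above.
// Only the conditionSuperTiles() signature comes from this commit; the wrapper name and the way "planes"
// is obtained are assumptions.
public void conditionSuperTilesExample(
		TilePlanes.PlaneData [][] planes,
		int debugLevel)
{
	// run up to 10 merge/re-link passes; the loop inside conditionSuperTiles() stops early
	// once a pass removes no planes by merging
	TilePlanes.PlaneData [][][] dbg_planes = conditionSuperTiles(
			planes,      // final TilePlanes.PlaneData [][] planes,
			10,          // final int max_num_merge_try,
			debugLevel); // final int debugLevel
	if ((debugLevel > 0) && (dbg_planes != null)) {
		System.out.println("conditionSuperTilesExample(): kept " + dbg_planes.length + " per-try snapshots for debugging");
	}
}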
public TilePlanes.PlaneData[][] copyPlanes(
TilePlanes.PlaneData[][] src_planes)
{
TilePlanes.PlaneData[][] dst_planes = new TilePlanes.PlaneData[src_planes.length][];
return copyPlanes(src_planes, dst_planes);
}
public TilePlanes.PlaneData[][] copyPlanes(
final TilePlanes.PlaneData[][] src_planes,
final TilePlanes.PlaneData[][] dst_planes)
{
final Thread[] threads = ImageDtt.newThreadArray(st.tileProcessor.threadsMax);
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nsTile = ai.getAndIncrement(); nsTile < src_planes.length; nsTile = ai.getAndIncrement()) {
if (src_planes[nsTile] != null){
dst_planes[nsTile] = new TilePlanes.PlaneData[src_planes[nsTile].length];
for (int np = 0; np < src_planes[nsTile].length; np++){
if (src_planes[nsTile][np] != null){
dst_planes[nsTile][np] = src_planes[nsTile][np].clone();
} else {
dst_planes[nsTile][np] = null;
}
}
} else {
dst_planes[nsTile] = null;
}
}
}
};
}
ImageDtt.startAndJoin(threads);
return dst_planes;
}
public TilePlanes.PlaneData[][] planesSmooth(
final TilePlanes.PlaneData[][] planes,
TilePlanes.PlaneData[][] planes_mod, // should start with a copy of planes
final int debugLevel,
final int dbg_X,
final int dbg_Y)
{
// if (planes_mod == null){
// planes_mod =copyPlanes(planes); // make always (for now) *********************
// }
final double maxDiff = Math.pow(10.0, -plPrecision);
for (int pass = 0; pass < plIterations; pass++){
double diff = planesSmoothStep(
planes, // final TilePlanes.PlaneData[][] measured_planes,
planes_mod, // final TilePlanes.PlaneData[][] mod_planes,
true, // final boolean calc_diff,
(pass < 10)? debugLevel: 0,
dbg_X,
dbg_Y);
if (diff < maxDiff){
if (debugLevel > -1){
System.out.println("planesSmooth(): pass:"+pass+" (of "+plIterations+"), rms = "+diff+" < "+maxDiff);
break;
}
}
if (debugLevel > -1){
System.out.println("planesSmooth() - pass:"+pass+" (of "+plIterations+"), rms = "+diff);
}
}
return planes_mod;
}
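// Illustrative sketch (not from the original source): the convergence rule used by planesSmooth() above -
// run the smoothing step at most plIterations times and stop once the per-pass RMS change drops below
// 10^-plPrecision. The SmoothingStep interface and the helper name are hypothetical stand-ins for
// planesSmoothStep().
interface SmoothingStep { double run(int pass); } // returns the RMS change produced by that pass
static int smoothUntilConverged(SmoothingStep step, int plIterations, int plPrecision)
{
	final double maxDiff = Math.pow(10.0, -plPrecision); // e.g. plPrecision = 6 -> 1.0e-6
	int pass = 0;
	for (; pass < plIterations; pass++) {
		double diff = step.run(pass);
		if (diff < maxDiff) break; // converged
	}
	return pass; // number of passes actually executed
}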
public double planesSmoothStep(
final TilePlanes.PlaneData[][] measured_planes,
final TilePlanes.PlaneData[][] mod_planes,
final boolean calc_diff,
final int debugLevel,
final int dbg_X,
final int dbg_Y)
{
final int [][] dirsYX = {{-1, 0},{-1,1},{0,1},{1,1},{1,0},{1,-1},{0,-1},{-1,-1}};
final int tilesX = st.tileProcessor.getTilesX();
// final int tilesY = st.tileProcessor.getTilesY();
final int superTileSize = st.tileProcessor.getSuperTileSize();
final int stilesX = (tilesX + superTileSize -1)/superTileSize;
// final int stilesY = (tilesY + superTileSize -1)/superTileSize;
final int debug_stile = dbg_Y * stilesX + dbg_X;
final TilePlanes.PlaneData[][] new_planes = copyPlanes(mod_planes);
// final Thread[] threads = ImageDtt.newThreadArray(tileProcessor.threadsMax);
final Thread[] threads = ImageDtt.newThreadArray((debugLevel > 1)? 1 : st.tileProcessor.threadsMax);
final int numThreads = threads.length;
final double [] rslt_diffs = calc_diff ? new double [numThreads] : null; // all 0;
final AtomicInteger ai_numThread = new AtomicInteger(0);
final AtomicInteger ai = new AtomicInteger(0);
final String [] titles= {
"orig", // 0
"measured", // 1
"n0","n1","n2","n3","n4","n5","n6","n7", // 2 .. 9
"m0","m1","m2","m3","m4","m5","m6","m7","mm", // 10..18
"diff"}; // 19
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
double [][] dbg_img=null;
int numThread = ai_numThread.getAndIncrement(); // unique number of thread to write to rslt_diffs[numThread]
for (int nsTile0 = ai.getAndIncrement(); nsTile0 < mod_planes.length; nsTile0 = ai.getAndIncrement()) {
int sty0 = nsTile0 / stilesX;
int stx0 = nsTile0 % stilesX;
// int dl = ((debugLevel > -1) && (nsTile0 == debug_stile)) ? 1:0;
int dl = ((debugLevel > 1) && (nsTile0 == debug_stile)) ? 3: debugLevel;
if ( new_planes[nsTile0] != null) {
if (dl > 0){
System.out.println("planesSmoothStep nsTile0="+nsTile0);
dbg_img = new double [titles.length][];
}
int np0_min = (new_planes[nsTile0].length > 1) ? 1:0; // Modify if overall plane will be removed
for (int np0 = np0_min; np0 < new_planes[nsTile0].length; np0 ++){
TilePlanes.PlaneData this_new_plane = new_planes[nsTile0][np0];
if (this_new_plane == null){
System.out.println("Bug? new_planes["+nsTile0+"]["+np0+"] == null");
continue;
}
if (dl > 0) dbg_img[ 0] = this_new_plane.getSinglePlaneDisparity(false);
if (dl > 0) dbg_img[ 1] = measured_planes[nsTile0][np0].getSinglePlaneDisparity(false);
int [] neibs = this_new_plane.getNeibBest();
double [][] costs = new double[neibs.length][];
double [] weights = new double[neibs.length];
int cost_index = 2; // overall cost
double sum_rcosts = 0.0;
int non_zero = 0;
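							// For each connected neighbor: convert its previous-iteration plane to this supertile,
							// evaluate merge fit qualities, and weight the neighbor by the inverse of the overall cost.
							// Weights are then normalized so the average weight of the active links is 1.0.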
for (int dir = 0; dir < neibs.length; dir++) if (neibs[dir] >= 0) {
int stx = stx0 + dirsYX[dir][1];
int sty = sty0 + dirsYX[dir][0];
int nsTile = sty * stilesX + stx; // from where to get
TilePlanes.PlaneData other_plane = this_new_plane.getPlaneToThis(
mod_planes[nsTile][neibs[dir]], // neighbor, previous value
dl - 1); // debugLevel);
costs[dir] = getFitQualities(
									// there will probably be no sticks here after merging
false, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks"
this_new_plane, // TilePlanes.PlaneData plane1, // should belong to the same supertile (or be converted for one)
other_plane, // TilePlanes.PlaneData plane2,
Double.NaN, // double merged_ev, // if NaN will calculate assuming the same supertile
Double.NaN, // double merged_ev_eq, // if NaN will calculate assuming the same supertile
Double.NaN, // double merged_wev, // if NaN will calculate assuming the same supertile - for world
Double.NaN, // double merged_wev_eq, // if NaN will calculate assuming the same supertile - for world
nsTile0+":"+np0+"-"+dir, // String prefix,
0); // int debugLevel)
for (int i = 3; i < costs[dir].length; i++) costs[dir][i] *= costs[dir][2];
non_zero ++;
weights[dir] = 1.0/costs[dir][cost_index];
sum_rcosts += weights[dir];
}
for (int dir = 0; dir < neibs.length; dir++) if (neibs[dir] >= 0) {
weights[dir] *= non_zero/sum_rcosts; // average weight for active links will be 1.0
}
if (dl > 0) {
								for (int dir = 0; dir < 8; dir++) if (costs[dir] != null){
System.out.print(nsTile0+":"+np0+"-"+dir+" "+String.format(" weight= %6.3f ",weights[dir]));
for (int i = 0; i < costs[dir].length; i++){
System.out.print(String.format("%8.3f", costs[dir][i]));
// if (i < 3) System.out.print(String.format("%8.3f", costs[dir][i]));
// else System.out.print(String.format("%8.3f", costs[dir][i]*costs[dir][2]));
}
System.out.println();
}
System.out.println();
}
this_new_plane =this_new_plane.clone(); // not to change weight!
this_new_plane.setWeight(0.0); //
double num_merged = 0.0; // double to add fractional pull weight of the center
double true_num_merged = 0.0;
for (int dir = 0; dir < neibs.length; dir++){
if (neibs[dir] >= 0) {
int stx = stx0 + dirsYX[dir][1];
int sty = sty0 + dirsYX[dir][0];
int nsTile = sty * stilesX + stx; // from where to get
TilePlanes.PlaneData other_plane = this_new_plane.getPlaneToThis(
mod_planes[nsTile][neibs[dir]], // neighbor, previous value
dl - 1); // debugLevel);
if (dl > 0) dbg_img[ 2 + dir] = other_plane.getSinglePlaneDisparity(false);
if ((other_plane != null) && ((other_plane.getValue() <= plMaxEigen) || (plMaxEigen == 0))) { // TODO:
if (this_new_plane.getWeight() > 0.0){
this_new_plane = this_new_plane.mergePlaneToThis(
other_plane, // PlaneData otherPd,
weights[dir], // 1.0, // double scale_other,
// here it should be no power function for the weights
1.0, // double starWeightPwr, // Use this power of tile weight when calculating connection cost
false, // boolean ignore_weights,
true, // boolean sum_weights,
plPreferDisparity,
dl - 1); // int debugLevel)
} else {
this_new_plane.copyNeib(this_new_plane, other_plane); // keep neighbors of the original center plane
this_new_plane.copyStar(this_new_plane, other_plane);
this_new_plane = other_plane; // should increment num_merged
this_new_plane.scaleWeight(weights[dir]);
this_new_plane.invalidateCalculated();
}
if (this_new_plane != null){
num_merged += 1.0;
true_num_merged += 1.0;
// just for debug / calculate
this_new_plane.getWorldXYZ(0);
}
new_planes[nsTile0][np0] = this_new_plane;
if (dl > 0) dbg_img[10 + dir] = this_new_plane.getSinglePlaneDisparity(false);
} else if (plStopBad){ // just skip, not abandon? (set to false)
this_new_plane = null;
break;
}
}
}
if (this_new_plane != null) {
// average weight over participating directions, so the relative pull
// does not depend on number of neighbors
if ((num_merged > 0.0) && (this_new_plane != null) && (plNormPow > 1.0)){
double scale = Math.pow(num_merged, plNormPow);
this_new_plane.scaleWeight(1.0/scale);
num_merged /=scale;
true_num_merged /= scale;
}
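							// Pull towards the measured plane: merge it in with relative weight plPull, then
							// (below) normalize the accumulated weight by the effective number of merged planes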
if ( (plPull > 0.0) &&
(measured_planes != null) &&
(measured_planes[nsTile0] != null) &&
(measured_planes[nsTile0][np0] != null) &&
// ((measured_planes[nsTile0][np0].getValue() < plMaxEigen) || (plMaxEigen == 0))
((measured_planes[nsTile0][np0].getValue() < plMaxEigen) || (plMaxEigen == 0) ||
(this_new_plane == null) || (this_new_plane.getWeight() == 0.0)) // keep measured if impossible to merge
){ // merge with "measured"
if (this_new_plane.getWeight() > 0.0){
this_new_plane = this_new_plane.mergePlaneToThis(
measured_planes[nsTile0][np0], // PlaneData otherPd,
plPull, // double scale_other,
1.0, // double starWeightPwr, // Use this power of tile weight when calculating connection cost
false, // boolean ignore_weights,
true, // boolean sum_weights,
plPreferDisparity,
dl - 1); // int debugLevel)
if (this_new_plane != null){
num_merged += plPull; // num_merged was 1.0 and weight is averaged over all neighbors
true_num_merged += 1.0;
}
} else {
this_new_plane = measured_planes[nsTile0][np0].clone();
num_merged = 1.0;
true_num_merged = 1.0;
}
new_planes[nsTile0][np0] = this_new_plane;
if (dl > 0) dbg_img[18] = this_new_plane.getSinglePlaneDisparity(false);
}
if ((num_merged > 0.0) && (this_new_plane != null)){
this_new_plane.scaleWeight(1.0/num_merged);
// double true_num_merged = num_merged - plPull + 1;
this_new_plane.setNumPoints((int) (this_new_plane.getNumPoints()/true_num_merged));
}
									// Revert if the merge failed (this_new_plane == null) or the result value is higher than the imposed maximum
									if ((this_new_plane == null) || ((this_new_plane.getValue() > plMaxEigen) && (plMaxEigen != 0))){ // TODO: Set more relaxed here?
if (dl > 0){
System.out.println("planesSmoothStep nsTile0="+nsTile0+" smoothed plane is too thick, using previous one");
dbg_img = new double [titles.length][];
}
this_new_plane = mod_planes[nsTile0][np0].clone();
new_planes[nsTile0][np0] = this_new_plane;
}
// Use non-exclusive
// just for debug / calculate
this_new_plane.getWorldXYZ(0);
// calculate largest disparity difference between old and new plane
if (rslt_diffs != null){ // filter out outliers here?
// get plane for both old and new, calc rms of diff
double [] oldPlane = mod_planes[nsTile0][np0].getSinglePlaneDisparity(
false); // use_NaN)
double [] newPlane = new_planes[nsTile0][np0].getSinglePlaneDisparity(
false); // use_NaN)
double s = 0.0;
for (int i = 0; i < oldPlane.length; i++){
double d = newPlane[i] - oldPlane[i];
s+= d*d;
}
s= Math.sqrt(s/oldPlane.length);
if (s > rslt_diffs[numThread]){
rslt_diffs[numThread] = s;
}
if (dl > 0) {
dbg_img[19] = new double[oldPlane.length];
for (int i = 0; i < oldPlane.length; i++){
dbg_img[19][i] = newPlane[i] - oldPlane[i];
}
if (debugLevel > 0) {
System.out.println("planesSmoothStep() nsTile0="+nsTile0+" rms = "+s);
}
}
if (debugLevel > -1) {
// if ((s > 5.0) || (dl > 0)){
if ((s > 5.0) || (dl > 0)){
System.out.println("planesSmoothStep() nsTile0="+nsTile0+":"+np0+
" num_merged="+num_merged + " rms = "+s+
" new_weight = "+new_planes[nsTile0][np0].getWeight()+
" old_weight = "+mod_planes[nsTile0][np0].getWeight()+
" new_value = "+new_planes[nsTile0][np0].getValue()+
" old_value = "+mod_planes[nsTile0][np0].getValue()+
" measured_weight = "+measured_planes[nsTile0][np0].getWeight()+
" measured_value = "+measured_planes[nsTile0][np0].getValue());
}
}
}
if ((dl > 0) && (debugLevel > 0)){
showDoubleFloatArrays sdfa_instance = new showDoubleFloatArrays(); // just for debugging?
sdfa_instance.showArrays(dbg_img, superTileSize, superTileSize, true, "smooth_step_x"+stx0+"_y"+sty0, titles);
}
} else { // if (this_new_plane != null)
this_new_plane = mod_planes[nsTile0][np0].clone();
new_planes[nsTile0][np0] = this_new_plane;
}
}
}
}
}
};
}
ImageDtt.startAndJoin(threads);
copyPlanes (new_planes, mod_planes); // copy back
if (rslt_diffs == null){
return Double.NaN;
}
double diff = 0.0;
for (int i = 0; (i < numThreads) ; i++) if (diff < rslt_diffs[i]) diff = rslt_diffs[i];
return diff; // return maximal difference
}
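	/**
	 * Repeatedly smooth the supertile planes and merge same-supertile plane pairs that become
	 * indistinguishable after smoothing. Each try: copy the measured planes, smooth them, rebuild
	 * inter-plane costs and exclusive links, select and evaluate merge candidates on the smoothed
	 * planes, and apply the resulting merge groups to the original (measured) planes. If nothing was
	 * merged, conflicting plane pairs are retried with relaxed thresholds; when that also merges
	 * nothing, the loop exits.
	 * A call sketch (assuming an initialized LinkPlanes instance lp and measured planes in st.planes,
	 * by analogy with lp.conditionSuperTiles()):
	 * <pre>
	 *   TilePlanes.PlaneData [][] planes_mod = lp.planesSmoothAndMerge(st.planes, 10, debugLevel);
	 * </pre>
	 * @param planes measured planes, modified in place when merge groups are applied
	 * @param max_num_tries maximal number of smooth-and-merge tries
	 * @param debugLevel debug level
	 * @return the smoothed planes from the last try
	 */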
public TilePlanes.PlaneData[][] planesSmoothAndMerge(
final TilePlanes.PlaneData[][] planes,
final int max_num_tries,
final int debugLevel)
{
TilePlanes.PlaneData[][] planes_mod = null;
for (int num_merge_try = 0; num_merge_try < max_num_tries; num_merge_try ++ ) { // smooth and merge
			planes_mod = copyPlanes(planes); // Start with a fresh copy of the measured (not smoothed) planes
planes_mod = planesSmooth(
planes, // final TilePlanes.PlaneData[][] planes,
planes_mod, // TilePlanes.PlaneData[][] planes_mod,
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
// create costs for the modified planes
interPlaneCosts(
true, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks"
planes_mod, // final TilePlanes.PlaneData [][] planes,
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
setExclusiveLinks(
planes_mod, // final TilePlanes.PlaneData [][] planes,
// 2.5, //final double max_cost
getExNeibCost() * getExNeibSmooth(), // final double max_cost,
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
// once more after updating exclusive links
planes_mod = planesSmooth(
planes, // final TilePlanes.PlaneData[][] planes,
planes_mod, // TilePlanes.PlaneData[][] planes_mod,
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
interPlaneCosts(
true, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks"
planes_mod, // final TilePlanes.PlaneData [][] planes,
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
// recalculate links? more smooth?
setExclusiveLinks(
planes_mod, // final TilePlanes.PlaneData [][] planes,
getExNeibCost() * getExNeibSmooth(), // final double max_cost,
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
// just in case? Not yet needed
setNonExclusive(
true, // final boolean en_sticks, // allow merging with bad plates
planes_mod, // final TilePlanes.PlaneData [][] planes,
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
// see if some modified planes need to be merged (but merge originals)
// TODO: Stricter requirements for merging here than for original planes?
int [][][] merge_candidates = getMergeSameTileCandidates(
planes_mod, // final TilePlanes.PlaneData [][] planes,
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
boolean [][][] valid_candidates = overlapSameTileCandidates (
planes_mod, // final TilePlanes.PlaneData [][] planes,
merge_candidates, // final int [][][] merge_candidates,
// TODO: Use programmed parameters
0.2, // final double min_distance, //?
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
// Consider supertiles with conflicts, merge conflicting layers with relaxed requirements
Conflicts iconflicts0 = new Conflicts(st);
int [][][] conflicts0 = iconflicts0.detectTriangularConflicts(
debugLevel); // 1); // final int debugLevel)
// Will be used later, now just use before merge candidates are modified
int [][][] conflicting_candidates = filterPairsByConflicts(
planes_mod, // final TilePlanes.PlaneData [][] planes,
merge_candidates, // final int [][][] merge_candidates,
conflicts0); // final int [][][] conflicts)
			// * A possible problem is that "normalizing" merge quality for low weights is not applicable to a "star" plane that includes neighbors
			// * Switch to a single "cost" function (costSameTileConnectionsAlt())
			// Still - how to merge stray tiles that do not have neighbors/star? Merge them the "old way" (costSameTileConnections()) if at least one
			// of the pair has no "star"
costSameTileConnections(
planes_mod, // final TilePlanes.PlaneData [][] planes,
merge_candidates, // final int [][][] merge_candidates,
valid_candidates, // final boolean [][][] valid_candidates, // will be updated
1.0, // final double relax,
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
costSameTileConnectionsAlt(
getMergeCostStar(), // relax_for_conflicts * 5.0, // final double threshold, //
getMergeCostNoStar(), //relax_for_conflicts * 10.0, // final double threshold_nostar,
planes_mod, // final TilePlanes.PlaneData [][] planes,
merge_candidates, // final int [][][] merge_candidates,
valid_candidates, // final boolean [][][] valid_candidates, // will be updated
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
int [][][] merge_groups = extractMergeSameTileGroups(
planes_mod, // final TilePlanes.PlaneData [][] planes,
merge_candidates, // final int [][][] merge_candidates,
valid_candidates, // boolean [][][] plane_overlaps,
1.0, // final double relax,
debugLevel +1, // final int debugLevel)
dbg_tileX,
dbg_tileY);
// Apply merging to the original planes, not the smoothed ones
int num_removed_by_merging = st.applyMergePlanes(
planes, // final TilePlanes.PlaneData[][] planes,
merge_groups, // final int [][][] merge_groups,
// parameters to generate ellipsoids
0.0, // 3, // final double disp_far, // minimal disparity to select (or NaN)
Double.NaN, // final double disp_near, // maximal disparity to select (or NaN)
plDispNorm, // final double dispNorm, // Normalize disparities to the average if above
0.0, // final double min_weight,
plMinPoints, // final int min_tiles,
// parameters to reduce outliers
plTargetEigen, // final double targetEigen, // = 0.1; // Remove outliers until main axis eigenvalue (possibly scaled by plDispNorm) gets below
plFractOutliers, // final double fractOutliers, // = 0.3; // Maximal fraction of outliers to remove
plMaxOutliers, // final int maxOutliers, // = 20; // Maximal number of outliers to remove
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
// TODO: above is very similar to conditionSuperTiles(). Combine
System.out.println("planesSmoothAndMerge(): try "+num_merge_try+ ": removed "+num_removed_by_merging+
((num_removed_by_merging>0)?" planes by merging, recalculating connections":""));
if (num_removed_by_merging == 0){ // re-calculate all links
// Consider supertiles with conflicts, merge conflicting layers with relaxed requirements
// just to list them
if (debugLevel > 0){
Conflicts conflicts0_stats = new Conflicts(
conflicts0,
st,
-1); // debugLevel);
}
System.out.println("Trying relaxed merging for conflicting plane pairs");
valid_candidates = overlapSameTileCandidates (
planes_mod, // final TilePlanes.PlaneData [][] planes,
						conflicting_candidates, // final int [][][] merge_candidates,
0.4, // final double min_distance,
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
// again same sequence
// try to merge original (measured) planes, not smoothed ones ??
costSameTileConnections(
planes, // final TilePlanes.PlaneData [][] planes,
conflicting_candidates, // final int [][][] merge_candidates,
valid_candidates, // final boolean [][][] valid_candidates, // will be updated
getConflRelax(), //relax_for_conflicts, // final double relax,
debugLevel, // 2, // -1, // debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
costSameTileConnectionsAlt(
getConflRelax() * getMergeCostStar(), // relax_for_conflicts * 5.0, // final double threshold, //
getConflRelax() * getMergeCostNoStar(), //relax_for_conflicts * 10.0, // final double threshold_nostar,
planes, // final TilePlanes.PlaneData [][] planes,
conflicting_candidates, // final int [][][] merge_candidates,
valid_candidates, // final boolean [][][] valid_candidates, // will be updated
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
merge_groups = extractMergeSameTileGroups(
planes, // final TilePlanes.PlaneData [][] planes,
conflicting_candidates, // final int [][][] merge_candidates,
valid_candidates, // boolean [][][] plane_overlaps,
getConflRelax(), // relax_for_conflicts, // final double relax,
debugLevel + 1, // final int debugLevel)
dbg_tileX,
dbg_tileY);
num_removed_by_merging = st.applyMergePlanes(
planes, // final TilePlanes.PlaneData[][] planes,
merge_groups, // final int [][][] merge_groups,
// parameters to generate ellipsoids
0.0, // 3, // final double disp_far, // minimal disparity to select (or NaN)
Double.NaN, // final double disp_near, // maximal disparity to select (or NaN)
plDispNorm, // final double dispNorm, // Normalize disparities to the average if above
0.0, // final double min_weight,
plMinPoints, // final int min_tiles,
// parameters to reduce outliers
plTargetEigen, // final double targetEigen, // = 0.1; // Remove outliers until main axis eigenvalue (possibly scaled by plDispNorm) gets below
plFractOutliers, // final double fractOutliers, // = 0.3; // Maximal fraction of outliers to remove
plMaxOutliers, // final int maxOutliers, // = 20; // Maximal number of outliers to remove
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
System.out.println("planesSmoothAndMerge(): try "+num_merge_try+ ": removed "+num_removed_by_merging+
((num_removed_by_merging>0)?" conflicting planes by merging, recalculating connections":""));
				if (num_merge_try >= max_num_tries) {
					System.out.println("Exceeded maximal number of iterations, breaking anyway...");
break;
}
if (num_removed_by_merging == 0){ // re-calculate all links
break;
}
}
// the following is not actually needed, just to keep measured (not smoothed) plane data consistent ?
// Do the same as in conditionSuperTiles before smoothing again
// Some actions below may be duplicate?
matchPlanes(
planes, // final TilePlanes.PlaneData [][] planes,
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
interPlaneCosts( //
true, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks"
planes, // final TilePlanes.PlaneData [][] planes,
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
filterNeighborPlanes(
planes, // final TilePlanes.PlaneData [][] planes,
true, // final boolean merge_low_eigen,
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
setExclusiveLinks( // stricter? why?
planes, // final TilePlanes.PlaneData [][] planes,
getExNeibCost()*getExNeibSmooth(), // final double max_cost,
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
setNonExclusive(
true, // final boolean en_sticks, // allow merging with bad plates
planes, // final TilePlanes.PlaneData [][] planes,
debugLevel, // final int debugLevel)
dbg_tileX,
dbg_tileY);
}
return planes_mod;
}
} }
...@@ -1413,7 +1413,9 @@ public class SuperTiles{ ...@@ -1413,7 +1413,9 @@ public class SuperTiles{
final int stilesX = (tilesX + superTileSize -1)/superTileSize; final int stilesX = (tilesX + superTileSize -1)/superTileSize;
final int stilesY = (tilesY + superTileSize -1)/superTileSize; final int stilesY = (tilesY + superTileSize -1)/superTileSize;
final int nStiles = stilesX * stilesY; final int nStiles = stilesX * stilesY;
final Thread[] threads = ImageDtt.newThreadArray(tileProcessor.threadsMax); // final Thread[] threads = ImageDtt.newThreadArray(tileProcessor.threadsMax);
final Thread[] threads = ImageDtt.newThreadArray((debugLevel > 1)? 1 : tileProcessor.threadsMax);
final AtomicInteger ai = new AtomicInteger(0); final AtomicInteger ai = new AtomicInteger(0);
final int debug_stile = (debugLevel > -1)? (dbg_Y * stilesX + dbg_X):-1; final int debug_stile = (debugLevel > -1)? (dbg_Y * stilesX + dbg_X):-1;
// TODO: Remove when promoting PlaneData // TODO: Remove when promoting PlaneData
...@@ -1423,7 +1425,9 @@ public class SuperTiles{ ...@@ -1423,7 +1425,9 @@ public class SuperTiles{
threads[ithread] = new Thread() { threads[ithread] = new Thread() {
public void run() { public void run() {
for (int nsTile = ai.getAndIncrement(); nsTile < nStiles; nsTile = ai.getAndIncrement()) { for (int nsTile = ai.getAndIncrement(); nsTile < nStiles; nsTile = ai.getAndIncrement()) {
if (nsTile == debug_stile){ // int dl = (nsTile == debug_stile) ? 3 : 0;
int dl = ((debugLevel > 1) && (nsTile == debug_stile)) ? 3: debugLevel;
if (dl > 1){
System.out.println("getPlaneDispStrengths(): nsTile="+nsTile); System.out.println("getPlaneDispStrengths(): nsTile="+nsTile);
} }
int stileY = nsTile / stilesX; int stileY = nsTile / stilesX;
...@@ -1438,7 +1442,7 @@ public class SuperTiles{ ...@@ -1438,7 +1442,7 @@ public class SuperTiles{
measuredLayers, // MeasuredLayers measuredLayers, measuredLayers, // MeasuredLayers measuredLayers,
plPreferDisparity); // boolean preferDisparity) plPreferDisparity); // boolean preferDisparity)
int dl = (nsTile == debug_stile) ? 3 : 0;
plane_disp_strength[nsTile] = new double[measuredLayers.getNumLayers()][][]; plane_disp_strength[nsTile] = new double[measuredLayers.getNumLayers()][][];
...@@ -1541,7 +1545,8 @@ public class SuperTiles{ ...@@ -1541,7 +1545,8 @@ public class SuperTiles{
final int stilesX = (tilesX + superTileSize -1)/superTileSize; final int stilesX = (tilesX + superTileSize -1)/superTileSize;
final int stilesY = (tilesY + superTileSize -1)/superTileSize; final int stilesY = (tilesY + superTileSize -1)/superTileSize;
final int nStiles = stilesX * stilesY; final int nStiles = stilesX * stilesY;
final Thread[] threads = ImageDtt.newThreadArray(tileProcessor.threadsMax); // final Thread[] threads = ImageDtt.newThreadArray(tileProcessor.threadsMax);
final Thread[] threads = ImageDtt.newThreadArray((debugLevel > 1)? 1 :tileProcessor.threadsMax);
final AtomicInteger ai = new AtomicInteger(0); final AtomicInteger ai = new AtomicInteger(0);
this.planes = new TilePlanes.PlaneData[nStiles][]; this.planes = new TilePlanes.PlaneData[nStiles][];
final int debug_stile = (debugLevel > -1)? (dbg_Y * stilesX + dbg_X):-1; final int debug_stile = (debugLevel > -1)? (dbg_Y * stilesX + dbg_X):-1;
...@@ -1553,12 +1558,13 @@ public class SuperTiles{ ...@@ -1553,12 +1558,13 @@ public class SuperTiles{
public void run() { public void run() {
for (int nsTile = ai.getAndIncrement(); nsTile < nStiles; nsTile = ai.getAndIncrement()) { for (int nsTile = ai.getAndIncrement(); nsTile < nStiles; nsTile = ai.getAndIncrement()) {
if (disparity_strengths[nsTile] != null){ if (disparity_strengths[nsTile] != null){
if (nsTile == debug_stile){ int dl = ((debugLevel > 1) && (nsTile == debug_stile)) ? 3: debugLevel;
if (dl > 1){
System.out.println("dispClusterize(): nsTile="+nsTile); System.out.println("dispClusterize(): nsTile="+nsTile);
} }
int stileY = nsTile / stilesX; int stileY = nsTile / stilesX;
int stileX = nsTile % stilesX; int stileX = nsTile % stilesX;
int dl = (nsTile == debug_stile) ? 3 : 0; // int dl = (nsTile == debug_stile) ? 3 : 0;
double[][][] disp_strength = new double[measuredLayers.getNumLayers()][][]; double[][][] disp_strength = new double[measuredLayers.getNumLayers()][][];
...@@ -1853,7 +1859,8 @@ public class SuperTiles{ ...@@ -1853,7 +1859,8 @@ public class SuperTiles{
final int stilesX = (tilesX + superTileSize -1)/superTileSize; final int stilesX = (tilesX + superTileSize -1)/superTileSize;
final int stilesY = (tilesY + superTileSize -1)/superTileSize; final int stilesY = (tilesY + superTileSize -1)/superTileSize;
final int nStiles = stilesX * stilesY; final int nStiles = stilesX * stilesY;
final Thread[] threads = ImageDtt.newThreadArray(tileProcessor.threadsMax); // final Thread[] threads = ImageDtt.newThreadArray(tileProcessor.threadsMax);
final Thread[] threads = ImageDtt.newThreadArray((debugLevel > 1)? 1 : tileProcessor.threadsMax);
final AtomicInteger ai = new AtomicInteger(0); final AtomicInteger ai = new AtomicInteger(0);
this.planes = new TilePlanes.PlaneData[nStiles][]; this.planes = new TilePlanes.PlaneData[nStiles][];
final int debug_stile = (debugLevel > -1)? (dbg_Y * stilesX + dbg_X):-1; final int debug_stile = (debugLevel > -1)? (dbg_Y * stilesX + dbg_X):-1;
...@@ -1877,7 +1884,7 @@ public class SuperTiles{ ...@@ -1877,7 +1884,7 @@ public class SuperTiles{
cltPass3d.getDisparity(), // double [] disparity, cltPass3d.getDisparity(), // double [] disparity,
cltPass3d.getStrength(), // double [] strength, cltPass3d.getStrength(), // double [] strength,
grown_selection); // null); // boolean [] selection) // may be null grown_selection); // null); // boolean [] selection) // may be null
if (debugLevel > -1) { if (debugLevel > 0) {
String [] titles = {"d0","s0","d1","s1","d2","s2","d3","s3","s","d","selection"}; String [] titles = {"d0","s0","d1","s1","d2","s2","d3","s3","s","d","selection"};
boolean [] dbg_sel= grown_selection; // cltPass3d.getSelected(); boolean [] dbg_sel= grown_selection; // cltPass3d.getSelected();
double [][] dbg_img = new double [titles.length][]; double [][] dbg_img = new double [titles.length][];
...@@ -1939,7 +1946,7 @@ public class SuperTiles{ ...@@ -1939,7 +1946,7 @@ public class SuperTiles{
final double [][][] mmm_hor = getMaxMinMax( final double [][][] mmm_hor = getMaxMinMax(
hor_disp_strength, // final double [][][][] disparity_strength, // pre-calculated disparity/strength [per super-tile][per-measurement layer][2][tiles] or null hor_disp_strength, // final double [][][][] disparity_strength, // pre-calculated disparity/strength [per super-tile][per-measurement layer][2][tiles] or null
null); // final boolean [][] tile_sel // null or per-measurement layer, per-tile selection. For each layer null - do not use, {} - use all null); // final boolean [][] tile_sel // null or per-measurement layer, per-tile selection. For each layer null - do not use, {} - use all
if (debugLevel > -1) { if (debugLevel > 0) {
dbg_hist[1] = showDisparityHistogram(); dbg_hist[1] = showDisparityHistogram();
dbg_hist[3] = showMaxMinMax(); dbg_hist[3] = showMaxMinMax();
} }
...@@ -1960,7 +1967,7 @@ public class SuperTiles{ ...@@ -1960,7 +1967,7 @@ public class SuperTiles{
sdfa_instance.showArrays(dbg_hist, hist_width0, hist_height0, true, "vert_hor_histograms_"+pass,dbg_hist_titles); sdfa_instance.showArrays(dbg_hist, hist_width0, hist_height0, true, "vert_hor_histograms_"+pass,dbg_hist_titles);
} }
// try to independently (same selections) clusterize both ways // try to independently (same selections) clusterize both ways
if (debugLevel > -1){ if (debugLevel > 0){
System.out.println("initialDiscriminateTiles(): before new_planes_hor, pass =" + (pass + 1) + " ( of "+max_tries+" )"); System.out.println("initialDiscriminateTiles(): before new_planes_hor, pass =" + (pass + 1) + " ( of "+max_tries+" )");
} }
...@@ -1974,11 +1981,11 @@ public class SuperTiles{ ...@@ -1974,11 +1981,11 @@ public class SuperTiles{
plMinPoints, // final int plMinPoints, // = 5; // Minimal number of points for plane detection plMinPoints, // final int plMinPoints, // = 5; // Minimal number of points for plane detection
smallDiff, // final double smallDiff, // = 0.4; // Consider merging initial planes if disparity difference below smallDiff, // final double smallDiff, // = 0.4; // Consider merging initial planes if disparity difference below
highMix, // final double highMix, //stHighMix = 0.4; // Consider merging initial planes if jumps between ratio above highMix, // final double highMix, //stHighMix = 0.4; // Consider merging initial planes if jumps between ratio above
1, // debugLevel, debugLevel, // 1, // debugLevel,
dbg_X, dbg_X,
dbg_Y); dbg_Y);
if (debugLevel > -1){ if (debugLevel > 0){
System.out.println("initialDiscriminateTiles(): before new_planes_vert, pass =" + (pass + 1) + " ( of "+max_tries+" )"); System.out.println("initialDiscriminateTiles(): before new_planes_vert, pass =" + (pass + 1) + " ( of "+max_tries+" )");
} }
final boolean [][][][] new_planes_vert = dispClusterize( final boolean [][][][] new_planes_vert = dispClusterize(
...@@ -1991,7 +1998,7 @@ public class SuperTiles{ ...@@ -1991,7 +1998,7 @@ public class SuperTiles{
plMinPoints, // final int plMinPoints, // = 5; // Minimal number of points for plane detection plMinPoints, // final int plMinPoints, // = 5; // Minimal number of points for plane detection
smallDiff, // final double smallDiff, // = 0.4; // Consider merging initial planes if disparity difference below smallDiff, // final double smallDiff, // = 0.4; // Consider merging initial planes if disparity difference below
highMix, // final double highMix, //stHighMix = 0.4; // Consider merging initial planes if jumps between ratio above highMix, // final double highMix, //stHighMix = 0.4; // Consider merging initial planes if jumps between ratio above
2, // debugLevel, debugLevel, // 2, // debugLevel,
dbg_X, dbg_X,
dbg_Y); dbg_Y);
...@@ -2001,13 +2008,12 @@ public class SuperTiles{ ...@@ -2001,13 +2008,12 @@ public class SuperTiles{
threads[ithread] = new Thread() { threads[ithread] = new Thread() {
public void run() { public void run() {
for (int nsTile = ai.getAndIncrement(); nsTile < nStiles; nsTile = ai.getAndIncrement()) { for (int nsTile = ai.getAndIncrement(); nsTile < nStiles; nsTile = ai.getAndIncrement()) {
int dl = ((debugLevel > -1) && (nsTile == debug_stile)) ? 3 : 0; // int dl = ((debugLevel > -1) && (nsTile == debug_stile)) ? 3 : 0;
int dl = ((debugLevel > 1) && (nsTile == debug_stile)) ? 3: debugLevel;
if (dl > 0){ if (dl > 0){
System.out.println("initialDiscriminateTiles() selecting: nsTile="+nsTile); System.out.println("initialDiscriminateTiles() selecting: nsTile="+nsTile);
} }
// int stileY = nsTile / stilesX;
// int stileX = nsTile % stilesX;
// int [] sTiles = {stileX, stileY};
double [][][][] ds = {vert_disp_strength[nsTile],hor_disp_strength[nsTile]}; double [][][][] ds = {vert_disp_strength[nsTile],hor_disp_strength[nsTile]};
boolean [][][][] sels_all = {new_planes_vert[nsTile],new_planes_hor[nsTile]}; // make possible to iterate boolean [][][][] sels_all = {new_planes_vert[nsTile],new_planes_hor[nsTile]}; // make possible to iterate
class SelStrength{ class SelStrength{
...@@ -2189,7 +2195,9 @@ public class SuperTiles{ ...@@ -2189,7 +2195,9 @@ public class SuperTiles{
threads[ithread] = new Thread() { threads[ithread] = new Thread() {
public void run() { public void run() {
for (int nsTile = ai.getAndIncrement(); nsTile < nStiles; nsTile = ai.getAndIncrement()) { for (int nsTile = ai.getAndIncrement(); nsTile < nStiles; nsTile = ai.getAndIncrement()) {
int dl = ((debugLevel > -1) && (nsTile == debug_stile)) ? 3 : 1; // int dl = ((debugLevel > -1) && (nsTile == debug_stile)) ? 3 : 1;
int dl = ((debugLevel > 1) && (nsTile == debug_stile)) ? 3: debugLevel;
if (dl > 1){ if (dl > 1){
System.out.println("refineDiscriminateTiles() selecting: nsTile="+nsTile); System.out.println("refineDiscriminateTiles() selecting: nsTile="+nsTile);
} }
...@@ -2240,7 +2248,7 @@ public class SuperTiles{ ...@@ -2240,7 +2248,7 @@ public class SuperTiles{
break; break;
} }
// merging suggested plane pair // merging suggested plane pair
if (debugLevel > -1) { if (debugLevel > 0) {
System.out.println("refineDiscriminateTiles(): nsTile="+nsTile+" merging pair ["+merge_planes[0]+","+merge_planes[1]+"]"); System.out.println("refineDiscriminateTiles(): nsTile="+nsTile+" merging pair ["+merge_planes[0]+","+merge_planes[1]+"]");
} }
TilePlanes.PlaneData [] new_planes = new TilePlanes.PlaneData [these_planes.length -1]; TilePlanes.PlaneData [] new_planes = new TilePlanes.PlaneData [these_planes.length -1];
...@@ -2257,7 +2265,7 @@ public class SuperTiles{ ...@@ -2257,7 +2265,7 @@ public class SuperTiles{
false, // boolean ignore_weights, false, // boolean ignore_weights,
true, // boolean sum_weights, true, // boolean sum_weights,
these_planes[merge_planes[0]].getPreferDisparity(), // preferDisparity, these_planes[merge_planes[0]].getPreferDisparity(), // preferDisparity,
dl-1); // int debugLevel) dl-2); // int debugLevel)
// combine tile selection - if next time pd0.reDiscriminateTiles() will fail, it will // combine tile selection - if next time pd0.reDiscriminateTiles() will fail, it will
// use old selections, we need to provide them (otherwise will use selection from the first plane) // use old selections, we need to provide them (otherwise will use selection from the first plane)
plane1.orMeasSelection(these_planes[merge_planes[1]].getMeasSelection()); plane1.orMeasSelection(these_planes[merge_planes[1]].getMeasSelection());
...@@ -2352,7 +2360,9 @@ public class SuperTiles{ ...@@ -2352,7 +2360,9 @@ public class SuperTiles{
threads[ithread] = new Thread() { threads[ithread] = new Thread() {
public void run() { public void run() {
for (int nsTile = ai.getAndIncrement(); nsTile < nStiles; nsTile = ai.getAndIncrement()) { for (int nsTile = ai.getAndIncrement(); nsTile < nStiles; nsTile = ai.getAndIncrement()) {
if (nsTile == debug_stile){ // int dl = ((debugLevel > 1) && (nsTile == debug_stile)) ? 3: debugLevel;
int dl = ((debugLevel > 1) && (nsTile == debug_stile)) ? 3: debugLevel;
if (dl > 1){
System.out.println("createPlanesFromSelections(): nsTile="+nsTile); System.out.println("createPlanesFromSelections(): nsTile="+nsTile);
} }
if (plane_selections[nsTile] != null) { if (plane_selections[nsTile] != null) {
...@@ -2362,7 +2372,8 @@ public class SuperTiles{ ...@@ -2362,7 +2372,8 @@ public class SuperTiles{
int stileY = nsTile / stilesX; int stileY = nsTile / stilesX;
int stileX = nsTile % stilesX; int stileX = nsTile % stilesX;
int [] sTiles = {stileX, stileY}; int [] sTiles = {stileX, stileY};
int dl = (nsTile == debug_stile) ? 3 : 0; // int dl = (nsTile == debug_stile) ? 3 : 0;
result_planes[nsTile] = null; result_planes[nsTile] = null;
// first make a plane from all tiles // first make a plane from all tiles
...@@ -2392,7 +2403,7 @@ public class SuperTiles{ ...@@ -2392,7 +2403,7 @@ public class SuperTiles{
dl); // int debugLevel); dl); // int debugLevel);
if ((st_planes != null) && (!st_planes.isEmpty())){ if ((st_planes != null) && (!st_planes.isEmpty())){
if (dl > 0){ if (dl > 1){
System.out.println("======= createPlanesFromSelections(): nsTile="+nsTile+" detecting bridges =========="); System.out.println("======= createPlanesFromSelections(): nsTile="+nsTile+" detecting bridges ==========");
} }
boolean [][][] split_sels = pd0.filterBridges( boolean [][][] split_sels = pd0.filterBridges(
...@@ -2401,7 +2412,7 @@ public class SuperTiles{ ...@@ -2401,7 +2412,7 @@ public class SuperTiles{
3, // int max_grow_far, 3, // int max_grow_far,
dl); // int debugLevel) dl); // int debugLevel)
if (split_sels !=null){ if (split_sels !=null){
if (dl > -1){ if (dl > 1){
System.out.println("======= createPlanesFromSelections(): nsTile="+nsTile+" removing bridges =========="); System.out.println("======= createPlanesFromSelections(): nsTile="+nsTile+" removing bridges ==========");
} }
if (dl > 2) { if (dl > 2) {
...@@ -2431,7 +2442,7 @@ public class SuperTiles{ ...@@ -2431,7 +2442,7 @@ public class SuperTiles{
smplSide, // int smplSide, // = 2; // Sample size (side of a square) smplSide, // int smplSide, // = 2; // Sample size (side of a square)
smplNum, // int smplNum, // = 3; // Number after removing worst smplNum, // int smplNum, // = 3; // Number after removing worst
smplRms, // double smplRms, // = 0.1; // Maximal RMS of the remaining tiles in a sample smplRms, // double smplRms, // = 0.1; // Maximal RMS of the remaining tiles in a sample
dl); // int debugLevel); dl - 1); // int debugLevel);
} }
} }
...@@ -2449,7 +2460,7 @@ public class SuperTiles{ ...@@ -2449,7 +2460,7 @@ public class SuperTiles{
if (LOWEST_PLANE(2) > 0) st_planes.add(0, st_planes.get(0)); // insert dummy at pos 0; if (LOWEST_PLANE(2) > 0) st_planes.add(0, st_planes.get(0)); // insert dummy at pos 0;
result_planes[nsTile] = st_planes.toArray(new TilePlanes.PlaneData[0] ); result_planes[nsTile] = st_planes.toArray(new TilePlanes.PlaneData[0] );
if (LOWEST_PLANE(2) > 0) result_planes[nsTile][0] = null; // remove dummy if (LOWEST_PLANE(2) > 0) result_planes[nsTile][0] = null; // remove dummy
if (dl >0){ if (dl >1){
System.out.println("createPlanesFromSelections(): nsTile="+nsTile); System.out.println("createPlanesFromSelections(): nsTile="+nsTile);
} }
if (dl > 2) { if (dl > 2) {
...@@ -2575,7 +2586,7 @@ public class SuperTiles{ ...@@ -2575,7 +2586,7 @@ public class SuperTiles{
smplNum, // final int smplNum, // = 3; // Number after removing worst smplNum, // final int smplNum, // = 3; // Number after removing worst
smplRms, // final double smplRms, // = 0.1; // Maximal RMS of the remaining tiles in a sample smplRms, // final double smplRms, // = 0.1; // Maximal RMS of the remaining tiles in a sample
debugLevel + 2, // 1, // final int debugLevel, debugLevel, // + 2, // 1, // final int debugLevel,
dbg_X, // final int dbg_X, dbg_X, // final int dbg_X,
dbg_Y); // final int dbg_Y) dbg_Y); // final int dbg_Y)
this.planes = new_planes; // save as "measured" (as opposed to "smoothed" by neighbors) planes this.planes = new_planes; // save as "measured" (as opposed to "smoothed" by neighbors) planes
...@@ -2692,7 +2703,7 @@ public class SuperTiles{ ...@@ -2692,7 +2703,7 @@ public class SuperTiles{
plDiscrXMedian, //final double plDiscrXMedian, // = 1.5; // Remove outliers from the final selection that have distance more than scaled median plDiscrXMedian, //final double plDiscrXMedian, // = 1.5; // Remove outliers from the final selection that have distance more than scaled median
2, // debugLevel, // final int debugLevel, debugLevel, // final int debugLevel,
dbg_X, // final int dbg_X, dbg_X, // final int dbg_X,
dbg_Y); // final int dbg_Y) dbg_Y); // final int dbg_Y)
...@@ -2734,7 +2745,7 @@ public class SuperTiles{ ...@@ -2734,7 +2745,7 @@ public class SuperTiles{
smplNum, // final int smplNum, // = 3; // Number after removing worst smplNum, // final int smplNum, // = 3; // Number after removing worst
smplRms, // final double smplRms, // = 0.1; // Maximal RMS of the remaining tiles in a sample smplRms, // final double smplRms, // = 0.1; // Maximal RMS of the remaining tiles in a sample
debugLevel + 2, // 1, // final int debugLevel, debugLevel, // + 2, // 1, // final int debugLevel,
dbg_X, // final int dbg_X, dbg_X, // final int dbg_X,
dbg_Y); // final int dbg_Y) dbg_Y); // final int dbg_Y)
		// combine old and new planes (refineDiscriminateTiles will return null for the supertile if failed to re-discriminate) // combine old and new planes (refineDiscriminateTiles will return null for the supertile if failed to re-discriminate)
...@@ -5844,14 +5855,14 @@ public class SuperTiles{ ...@@ -5844,14 +5855,14 @@ public class SuperTiles{
public TilePlanes.PlaneData[][] copyPlanes( public TilePlanes.PlaneData[][] copyPlanes_old(
TilePlanes.PlaneData[][] src_planes) TilePlanes.PlaneData[][] src_planes)
{ {
TilePlanes.PlaneData[][] dst_planes = new TilePlanes.PlaneData[src_planes.length][]; TilePlanes.PlaneData[][] dst_planes = new TilePlanes.PlaneData[src_planes.length][];
return copyPlanes(src_planes, dst_planes); return copyPlanes_old(src_planes, dst_planes);
} }
public TilePlanes.PlaneData[][] copyPlanes( public TilePlanes.PlaneData[][] copyPlanes_old(
final TilePlanes.PlaneData[][] src_planes, final TilePlanes.PlaneData[][] src_planes,
final TilePlanes.PlaneData[][] dst_planes) final TilePlanes.PlaneData[][] dst_planes)
{ {
...@@ -5881,7 +5892,7 @@ public class SuperTiles{ ...@@ -5881,7 +5892,7 @@ public class SuperTiles{
return dst_planes; return dst_planes;
} }
public TilePlanes.PlaneData[][] planesSmooth( public TilePlanes.PlaneData[][] planesSmooth_old(
final LinkPlanes lp, final LinkPlanes lp,
final double meas_pull,// relative pull of the original (measured) plane with respect to the average of the neighbors final double meas_pull,// relative pull of the original (measured) plane with respect to the average of the neighbors
final double maxValue, // do not combine with too bad planes with primary eigenvalue above this value ( 0 any OK) final double maxValue, // do not combine with too bad planes with primary eigenvalue above this value ( 0 any OK)
...@@ -5896,10 +5907,10 @@ public class SuperTiles{ ...@@ -5896,10 +5907,10 @@ public class SuperTiles{
final int dbg_Y) final int dbg_Y)
{ {
if (this.planes_mod == null){ if (this.planes_mod == null){
this.planes_mod =copyPlanes(this.planes); // make always (for now) ********************* this.planes_mod =copyPlanes_old(this.planes); // make always (for now) *********************
} }
for (int pass = 0; pass < num_passes; pass++){ for (int pass = 0; pass < num_passes; pass++){
double diff = planesSmoothStep( double diff = planesSmoothStep_old(
lp, // LinkPlanes lp, lp, // LinkPlanes lp,
meas_pull, // relative pull of the original (measured) plane with respect to the average of the neighbors meas_pull, // relative pull of the original (measured) plane with respect to the average of the neighbors
maxValue, // final double maxValue, // do not combine with too bad planes maxValue, // final double maxValue, // do not combine with too bad planes
...@@ -5926,7 +5937,7 @@ public class SuperTiles{ ...@@ -5926,7 +5937,7 @@ public class SuperTiles{
return this.planes_mod; return this.planes_mod;
} }
public double planesSmoothStep( public double planesSmoothStep_old(
final LinkPlanes lp, final LinkPlanes lp,
final double meas_pull,// relative pull of the original (measured) plane with respect to the average of the neighbors final double meas_pull,// relative pull of the original (measured) plane with respect to the average of the neighbors
final double maxValue, // do not combine with too bad planes, do not merge if result is above final double maxValue, // do not combine with too bad planes, do not merge if result is above
...@@ -5947,7 +5958,7 @@ public class SuperTiles{ ...@@ -5947,7 +5958,7 @@ public class SuperTiles{
final int stilesX = (tilesX + superTileSize -1)/superTileSize; final int stilesX = (tilesX + superTileSize -1)/superTileSize;
// final int stilesY = (tilesY + superTileSize -1)/superTileSize; // final int stilesY = (tilesY + superTileSize -1)/superTileSize;
final int debug_stile = dbg_Y * stilesX + dbg_X; final int debug_stile = dbg_Y * stilesX + dbg_X;
final TilePlanes.PlaneData[][] new_planes = copyPlanes(mod_planes); final TilePlanes.PlaneData[][] new_planes = copyPlanes_old(mod_planes);
final Thread[] threads = ImageDtt.newThreadArray(tileProcessor.threadsMax); final Thread[] threads = ImageDtt.newThreadArray(tileProcessor.threadsMax);
final int numThreads = threads.length; final int numThreads = threads.length;
final double [] rslt_diffs = calc_diff ? new double [numThreads] : null; // all 0; final double [] rslt_diffs = calc_diff ? new double [numThreads] : null; // all 0;
...@@ -6190,7 +6201,7 @@ public class SuperTiles{ ...@@ -6190,7 +6201,7 @@ public class SuperTiles{
}; };
} }
ImageDtt.startAndJoin(threads); ImageDtt.startAndJoin(threads);
copyPlanes (new_planes, mod_planes); // copy back copyPlanes_old (new_planes, mod_planes); // copy back
if (rslt_diffs == null){ if (rslt_diffs == null){
return Double.NaN; return Double.NaN;
} }
...@@ -6294,9 +6305,27 @@ public class SuperTiles{ ...@@ -6294,9 +6305,27 @@ public class SuperTiles{
return split_lines; return split_lines;
} }
/**
 * Apply merging of planes within the same supertile by combining their tiles and re-generating ellipsoids
 * (diagonalizing covariance matrices). Some outliers may be removed after the merge.
 * @param planes per-supertile, per-plane array of plane instances - will be modified by the merge
* @param merge_groups per-supertile group sets for merging. Each group set is an array of groups. Each group is an array
* of plane indices
* Parameters to generate planes (ellipsoids):
* @param disp_far disparity lower limit (Double.NaN - any)
* @param disp_near disparity upper limit (Double.NaN - any)
* @param dispNorm disparity normalization value (when average disparity is above, difference is proportionally reduced)
* @param min_weight minimal tile strength to be used
* @param min_tiles minimal number of tiles to generate ellipsoid
* Parameters for outlier removal:
* @param targetEigen target main eigenvalue (thickness in disparity space)
* @param fractOutliers maximal fraction of all tiles to be removed as outliers
* @param maxOutliers maximal absolute number of outliers to be removed from each plane (ellipsoid)
* @param debugLevel debug level
 * @param dbg_X supertile x-index for detailed debug data
 * @param dbg_Y supertile y-index for detailed debug data
* @return total number of plane groups merged
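 * A call sketch, mirroring the call in LinkPlanes.planesSmoothAndMerge():
 * <pre>
 *   int num_removed_by_merging = st.applyMergePlanes(planes, merge_groups,
 *       0.0,          // disp_far
 *       Double.NaN,   // disp_near
 *       plDispNorm, 0.0, plMinPoints,
 *       plTargetEigen, plFractOutliers, plMaxOutliers,
 *       debugLevel, dbg_tileX, dbg_tileY);
 * </pre>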
*/
public int applyMergePlanes( public int applyMergePlanes(
final TilePlanes.PlaneData[][] planes, final TilePlanes.PlaneData[][] planes,
final int [][][] merge_groups, final int [][][] merge_groups,
...@@ -6319,14 +6348,16 @@ public class SuperTiles{ ...@@ -6319,14 +6348,16 @@ public class SuperTiles{
final int superTileSize = tileProcessor.getSuperTileSize(); final int superTileSize = tileProcessor.getSuperTileSize();
final int stilesX = (tilesX + superTileSize -1)/superTileSize; final int stilesX = (tilesX + superTileSize -1)/superTileSize;
final int debug_stile = dbg_Y * stilesX + dbg_X; final int debug_stile = dbg_Y * stilesX + dbg_X;
final Thread[] threads = ImageDtt.newThreadArray(tileProcessor.threadsMax); final Thread[] threads = ImageDtt.newThreadArray((debugLevel > 1)? 1 : tileProcessor.threadsMax);
final AtomicInteger ai = new AtomicInteger(0); final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) { for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() { threads[ithread] = new Thread() {
public void run() { public void run() {
for (int nsTile = ai.getAndIncrement(); nsTile < planes.length; nsTile = ai.getAndIncrement()) { for (int nsTile = ai.getAndIncrement(); nsTile < planes.length; nsTile = ai.getAndIncrement()) {
int dl = ((debugLevel > -1) && (nsTile == debug_stile)) ? 4:0; // int dl = ((debugLevel > -1) && (nsTile == debug_stile)) ? 4:0;
int dl = ((debugLevel > 1) && (nsTile == debug_stile)) ? 4: debugLevel;
if (merge_groups[nsTile] != null){ if (merge_groups[nsTile] != null){
// first merge all to the lowest plane (they are ordered), then re-order remaining planes // first merge all to the lowest plane (they are ordered), then re-order remaining planes
for (int ng = 0; ng < merge_groups[nsTile].length; ng++) { for (int ng = 0; ng < merge_groups[nsTile].length; ng++) {
......
...@@ -4969,7 +4969,7 @@ public class TilePlanes { ...@@ -4969,7 +4969,7 @@ public class TilePlanes {
} }
} }
if (((debugLevel > -1) && ((debugLevel > 0) || (max_attr_corr > attractionCorrMax)) )&& (num_planes > 1)){ if (((debugLevel > 0) && ((debugLevel > 1) || (max_attr_corr > attractionCorrMax)) )&& (num_planes > 1)){
String dbg_s = "refineDiscriminateTiles() plane attraction correlation for "+prefix+": maximal="+max_attr_corr; String dbg_s = "refineDiscriminateTiles() plane attraction correlation for "+prefix+": maximal="+max_attr_corr;
for (int np = 0; np < num_planes; np++) { for (int np = 0; np < num_planes; np++) {
for (int np1 = np + 1; np1 < num_planes; np1++) { for (int np1 = np + 1; np1 < num_planes; np1++) {
......
...@@ -3262,7 +3262,7 @@ public class TileProcessor { ...@@ -3262,7 +3262,7 @@ public class TileProcessor {
return true; return true;
} }
public void conditionSuperTiles( public void conditionSuperTilesOld(
EyesisCorrectionParameters.CLTParameters clt_parameters, EyesisCorrectionParameters.CLTParameters clt_parameters,
GeometryCorrection geometryCorrection, GeometryCorrection geometryCorrection,
SuperTiles st, SuperTiles st,
...@@ -3283,7 +3283,7 @@ public class TileProcessor { ...@@ -3283,7 +3283,7 @@ public class TileProcessor {
lp.interPlaneCosts( // lp.interPlaneCosts( //
true, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks" true, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks"
st.planes, // final TilePlanes.PlaneData [][] planes, st.planes, // final TilePlanes.PlaneData [][] planes,
2, // -1, // debugLevel, // final int debugLevel) debugLevel, // final int debugLevel)
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
...@@ -3299,7 +3299,7 @@ public class TileProcessor { ...@@ -3299,7 +3299,7 @@ public class TileProcessor {
// false, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks" // false, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks"
true, // final boolean en_sticks, // allow merging with bad plates true, // final boolean en_sticks, // allow merging with bad plates
st.planes, // final TilePlanes.PlaneData [][] planes, st.planes, // final TilePlanes.PlaneData [][] planes,
2, // final int debugLevel) debugLevel,
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
if (debugLevel>100) System.out.println(quality_stats1.length); if (debugLevel>100) System.out.println(quality_stats1.length);
...@@ -3311,7 +3311,7 @@ public class TileProcessor { ...@@ -3311,7 +3311,7 @@ public class TileProcessor {
lp.setExclusiveLinks( lp.setExclusiveLinks(
st.planes, // final TilePlanes.PlaneData [][] planes, st.planes, // final TilePlanes.PlaneData [][] planes,
lp.getExNeibCost(), // final double max_cost, lp.getExNeibCost(), // final double max_cost,
2, // -1, // debugLevel, // final int debugLevel) debugLevel, // final int debugLevel)
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
...@@ -3320,7 +3320,7 @@ public class TileProcessor { ...@@ -3320,7 +3320,7 @@ public class TileProcessor {
// false, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks" // false, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks"
true, // final boolean en_sticks, // allow merging with bad plates true, // final boolean en_sticks, // allow merging with bad plates
st.planes, // final TilePlanes.PlaneData [][] planes, st.planes, // final TilePlanes.PlaneData [][] planes,
2, // -1, // debugLevel, // final int debugLevel) debugLevel, // final int debugLevel)
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
...@@ -3335,7 +3335,7 @@ public class TileProcessor { ...@@ -3335,7 +3335,7 @@ public class TileProcessor {
2, // starSteps, // final int steps, 2, // starSteps, // final int steps,
st.planes, // final TilePlanes.PlaneData [][] planes, st.planes, // final TilePlanes.PlaneData [][] planes,
clt_parameters.plPreferDisparity, // preferDisparity, // final boolean preferDisparity) clt_parameters.plPreferDisparity, // preferDisparity, // final boolean preferDisparity)
0); // debugLevel); debugLevel-2);
...@@ -3360,7 +3360,7 @@ public class TileProcessor { ...@@ -3360,7 +3360,7 @@ public class TileProcessor {
plane_nooverlaps, // final boolean [][][] valid_candidates, // will be updated plane_nooverlaps, // final boolean [][][] valid_candidates, // will be updated
true, // final boolean merge_low_eigen, here it should be true true, // final boolean merge_low_eigen, here it should be true
true, // final boolean useNonExcl, // consider only directions available for non-exclusive merges true, // final boolean useNonExcl, // consider only directions available for non-exclusive merges
2, // -1, // debugLevel, // final int debugLevel) debugLevel, // final int debugLevel)
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
...@@ -3375,7 +3375,7 @@ public class TileProcessor { ...@@ -3375,7 +3375,7 @@ public class TileProcessor {
merge_candidates, // final int [][][] merge_candidates, merge_candidates, // final int [][][] merge_candidates,
plane_nooverlaps, // final boolean [][][] valid_candidates, // will be updated plane_nooverlaps, // final boolean [][][] valid_candidates, // will be updated
1.0, // double relax, 1.0, // double relax,
2, // -1, // debugLevel, // final int debugLevel) debugLevel, // final int debugLevel)
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
// System.out.println("merge_cost_data.length = " + merge_cost_data.length); // System.out.println("merge_cost_data.length = " + merge_cost_data.length);
...@@ -3389,7 +3389,7 @@ public class TileProcessor { ...@@ -3389,7 +3389,7 @@ public class TileProcessor {
st.planes, // final TilePlanes.PlaneData [][] planes, st.planes, // final TilePlanes.PlaneData [][] planes,
merge_candidates, // final int [][][] merge_candidates, merge_candidates, // final int [][][] merge_candidates,
plane_nooverlaps, // final boolean [][][] valid_candidates, // will be updated plane_nooverlaps, // final boolean [][][] valid_candidates, // will be updated
2, // -1, // debugLevel, // final int debugLevel) debugLevel, // final int debugLevel)
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
...@@ -3424,7 +3424,7 @@ public class TileProcessor { ...@@ -3424,7 +3424,7 @@ public class TileProcessor {
clt_parameters.plTargetEigen, // final double targetEigen, // = 0.1; // Remove outliers until main axis eigenvalue (possibly scaled by plDispNorm) gets below clt_parameters.plTargetEigen, // final double targetEigen, // = 0.1; // Remove outliers until main axis eigenvalue (possibly scaled by plDispNorm) gets below
clt_parameters.plFractOutliers, // final double fractOutliers, // = 0.3; // Maximal fraction of outliers to remove clt_parameters.plFractOutliers, // final double fractOutliers, // = 0.3; // Maximal fraction of outliers to remove
clt_parameters.plMaxOutliers, // final int maxOutliers, // = 20; // Maximal number of outliers to remove clt_parameters.plMaxOutliers, // final int maxOutliers, // = 20; // Maximal number of outliers to remove
2, // -1, // debugLevel, // final int debugLevel) debugLevel, // final int debugLevel)
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
...@@ -3582,13 +3582,24 @@ public class TileProcessor { ...@@ -3582,13 +3582,24 @@ public class TileProcessor {
LinkPlanes lp = new LinkPlanes (clt_parameters, st); LinkPlanes lp = new LinkPlanes (clt_parameters, st);
// condition supertiles (create and manage links, merge) // condition supertiles (create and manage links, merge)
conditionSuperTiles( lp.conditionSuperTiles(
clt_parameters, //EyesisCorrectionParameters.CLTParameters clt_parameters, st.planes, // final TilePlanes.PlaneData [][] planes,
geometryCorrection, // GeometryCorrection geometryCorrection, 10, // final int max_num_merge_try,
st, // SuperTiles st, 0); // debugLevel); // final int debugLevel);
lp, // LinkPlanes lp, // Used only by conflicts (not processed currently)
debugLevel); // final int debugLevel); lp.calcStarValueStrength(
true, // boolean set_start_planes,
clt_parameters.plStarOrtho, // orthoWeight, // final double orthoWeight,
clt_parameters.plStarDiag, // diagonalWeight, // final double diagonalWeight,
clt_parameters.plStarPwr, // starPwr, // final double starPwr, // Divide cost by number of connections to this power
clt_parameters.plStarWeightPwr,// starWeightPwr, // final double starWeightPwr, // Use this power of tile weight when calculating connection cost
clt_parameters.plWeightToDens, // weightToDens, // Balance weighted density against density. 0.0 - density, 1.0 - weighted density
clt_parameters.plStarValPwr, // starValPwr, //double starValPwr, // Raise value of each tile before averaging
2, // starSteps, // final int steps,
st.planes, // final TilePlanes.PlaneData [][] planes,
clt_parameters.plPreferDisparity, // preferDisparity, // final boolean preferDisparity)
debugLevel-2);
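calcStarValueStrength() is called with the same parameter set several times in this commit. The sketch below is only one plausible reading of the parameter comments (ortho/diagonal neighbor weights, tile weight raised to starWeightPwr, values raised to starValPwr before averaging, result divided by connections^starPwr); the class, method and the even-direction-equals-ortho convention are assumptions, not the actual LinkPlanes math.

	// Hypothetical sketch of a "star" value for one supertile plane.
	public class StarValueSketch {
		public static double starValue(double centerVal, double centerWeight,
		                               double[] neibVal, double[] neibWeight, // 8 directions, NaN = no link
		                               double orthoWeight, double diagonalWeight,
		                               double starPwr, double starWeightPwr, double starValPwr) {
			double sumW  = Math.pow(centerWeight, starWeightPwr);
			double sumWV = sumW * Math.pow(centerVal, starValPwr);
			int numConnections = 0;
			for (int dir = 0; dir < 8; dir++) {
				if (Double.isNaN(neibVal[dir])) continue;                       // no link in this direction
				double dirW = ((dir & 1) == 0) ? orthoWeight : diagonalWeight;  // assumption: even dirs are orthogonal
				double w = dirW * Math.pow(neibWeight[dir], starWeightPwr);
				sumW  += w;
				sumWV += w * Math.pow(neibVal[dir], starValPwr);
				numConnections++;
			}
			double avg = sumWV / sumW;
			if (starValPwr > 0) avg = Math.pow(avg, 1.0 / starValPwr);          // undo the value power
			// "Divide cost by number of connections to this power"
			return (numConnections > 0) ? avg / Math.pow(numConnections, starPwr) : avg;
		}
	}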
// re-generate planes in the supertiles using previously calculated planes (for the tiles and their neighbors) // re-generate planes in the supertiles using previously calculated planes (for the tiles and their neighbors)
// as hints, new planes will be assumed parallel to the known and possibly slightly offset in disparity // as hints, new planes will be assumed parallel to the known and possibly slightly offset in disparity
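The comment above describes refitting planes that stay parallel to a known hint plane and only shift in disparity. Writing a plane as d(x,y) = ax*x + ay*y + c with the tilt (ax, ay) fixed by the hint, the least-squares fit reduces to a weighted mean of per-tile offsets. A standalone sketch of just that step (hypothetical names, not the project's discrimination code):

	// Hypothetical sketch: refit only the disparity offset of a plane whose tilt
	// (ax, ay) is copied from a known "hint" plane. Weighted least squares gives
	// c = sum(w*(d - ax*x - ay*y)) / sum(w).
	public class ParallelPlaneFitSketch {
		public static double fitOffset(double ax, double ay,     // tilt from the hint plane
		                               double[] x, double[] y,   // tile coordinates
		                               double[] d, double[] w) { // measured disparity and weight
			double sw = 0.0, swr = 0.0;
			for (int i = 0; i < d.length; i++) {
				double residual = d[i] - ax * x[i] - ay * y[i];  // offset implied by this tile
				sw  += w[i];
				swr += w[i] * residual;
			}
			return (sw > 0) ? swr / sw : Double.NaN;             // new plane: ax*x + ay*y + c
		}
	}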
if (clt_parameters.plDiscrEn) { if (clt_parameters.plDiscrEn) {
...@@ -3627,37 +3638,50 @@ public class TileProcessor { ...@@ -3627,37 +3638,50 @@ public class TileProcessor {
clt_parameters.plDiscrGrown, // final int plDiscrGrown, // = 0; // Only use tiles within this range from original selection clt_parameters.plDiscrGrown, // final int plDiscrGrown, // = 0; // Only use tiles within this range from original selection
clt_parameters.plDiscrXMedian, // final double plDiscrXMedian, // = 1.5; // Remove outliers from the final selection that have distance more than scaled median clt_parameters.plDiscrXMedian, // final double plDiscrXMedian, // = 1.5; // Remove outliers from the final selection that have distance more than scaled median
0, // -1, // debugLevel, // final int debugLevel) debugLevel, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
// condition the redefined planes // condition the redefined planes
conditionSuperTiles( lp.conditionSuperTiles(
clt_parameters, //EyesisCorrectionParameters.CLTParameters clt_parameters, st.planes, // final TilePlanes.PlaneData [][] planes,
geometryCorrection, // GeometryCorrection geometryCorrection, 10, // final int max_num_merge_try,
st, // SuperTiles st,
lp, // LinkPlanes lp,
debugLevel); // final int debugLevel); debugLevel); // final int debugLevel);
lp.calcStarValueStrength(
true, // boolean set_start_planes,
clt_parameters.plStarOrtho, // orthoWeight, // final double orthoWeight,
clt_parameters.plStarDiag, // diagonalWeight, // final double diagonalWeight,
clt_parameters.plStarPwr, // starPwr, // final double starPwr, // Divide cost by number of connections to this power
clt_parameters.plStarWeightPwr,// starWeightPwr, // final double starWeightPwr, // Use this power of tile weight when calculating connection cost
clt_parameters.plWeightToDens, // weightToDens, // Balance weighted density against density. 0.0 - density, 1.0 - weighted density
clt_parameters.plStarValPwr, // starValPwr, //double starValPwr, // Raise value of each tile before averaging
2, // starSteps, // final int steps,
st.planes, // final TilePlanes.PlaneData [][] planes,
clt_parameters.plPreferDisparity, // preferDisparity, // final boolean preferDisparity)
debugLevel - 2);
} }
/*
double [][] quality_stats1 = lp.selectNeighborPlanesMutual( double [][] quality_stats1 = lp.selectNeighborPlanesMutual(
// false, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks" // false, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks"
true, // final boolean en_sticks, // allow merging with bad plates true, // final boolean en_sticks, // allow merging with bad plates
st.planes, // final TilePlanes.PlaneData [][] planes, st.planes, // final TilePlanes.PlaneData [][] planes,
2, // final int debugLevel) debugLevel,
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
if (debugLevel>100) System.out.println(quality_stats1.length); if (debugLevel>100) System.out.println(quality_stats1.length);
System.out.println("Testing - overwriting selectNeighborPlanesMutual() results with setExclusiveLinks()"); System.out.println("Testing - overwriting selectNeighborPlanesMutual() results with setExclusiveLinks()");
//Just overwrite results of the previous method //Just overwrite results of the previous method
*/
lp.setExclusiveLinks( lp.setExclusiveLinks(
st.planes, // final TilePlanes.PlaneData [][] planes, st.planes, // final TilePlanes.PlaneData [][] planes,
lp.getExNeibCost(), // final double max_cost, lp.getExNeibCost(), // final double max_cost,
2, // -1, // debugLevel, // final int debugLevel) debugLevel, // final int debugLevel)
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
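setExclusiveLinks() is used here to replace the mutual-selection result: for each pair of neighboring supertiles it keeps at most one plane-to-plane link per layer, discarding links whose cost exceeds max_cost (lp.getExNeibCost()). A hedged greedy stand-in for that selection on a single pair of supertiles (hypothetical names, not the actual LinkPlanes implementation):

	// Hypothetical sketch: greedily take the cheapest remaining layer pair below
	// maxCost, so each layer on either side ends up with at most one link.
	import java.util.Arrays;

	public class ExclusiveLinksSketch {
		/** @return link[i] = matched layer in the neighbor supertile, or -1 */
		public static int[] pick(double[][] cost, double maxCost) {
			int n1 = cost.length, n2 = (n1 > 0) ? cost[0].length : 0;
			int[] link = new int[n1];
			Arrays.fill(link, -1);
			boolean[] used2 = new boolean[n2];
			while (true) {
				int bi = -1, bj = -1;
				double best = maxCost;
				for (int i = 0; i < n1; i++) {
					if (link[i] >= 0) continue;
					for (int j = 0; j < n2; j++) {
						if (!used2[j] && (cost[i][j] < best)) { best = cost[i][j]; bi = i; bj = j; }
					}
				}
				if (bi < 0) break;        // nothing left below the cost threshold
				link[bi] = bj;
				used2[bj] = true;
			}
			return link;
		}
	}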
/*
st.resolveConflicts( st.resolveConflicts(
lp, // LinkPlanes lp, lp, // LinkPlanes lp,
clt_parameters.plMaxEigen, clt_parameters.plMaxEigen,
...@@ -3676,31 +3700,12 @@ public class TileProcessor { ...@@ -3676,31 +3700,12 @@ public class TileProcessor {
clt_parameters.plNewConfl, // boolean preferDisparity, // Allow more conflicts if overall cost is reduced clt_parameters.plNewConfl, // boolean preferDisparity, // Allow more conflicts if overall cost is reduced
clt_parameters.plMaxChanges, // int maxChanges, // Maximal number of simultaneous connection changes around one tile (0 - any) clt_parameters.plMaxChanges, // int maxChanges, // Maximal number of simultaneous connection changes around one tile (0 - any)
clt_parameters.plPreferDisparity, clt_parameters.plPreferDisparity,
1, // final int debugLevel) debugLevel,
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
/* */
st.resolveConflicts(
clt_parameters.plMaxEigen, if (clt_parameters.plSplitApply) {
clt_parameters.plConflDualTri, // boolean conflDualTri, // Resolve dual triangles conflict (odoodo)
clt_parameters.plConflMulti, // boolean conflMulti, // Resolve multiple odo triangles conflicts
clt_parameters.plConflDiag, // boolean conflDiag, // Resolve diagonal (ood) conflicts
clt_parameters.plConflStar, // boolean conflStar, // Resolve all conflicts around a supertile
clt_parameters.plStarSteps, // int starSteps, // How far to look around when calculating connection cost
clt_parameters.plStarOrtho, // double orthoWeight,
clt_parameters.plStarDiag, // double diagonalWeight,
clt_parameters.plStarPwr, // double starPwr, // Divide cost by number of connections to this power
clt_parameters.plStarValPwr, // double starValPwr, // Raise value of each tile before averaging
clt_parameters.plDblTriLoss, // double diagonalWeight,
true, // clt_parameters.plNewConfl, // Allow more conflicts if overall cost is reduced
clt_parameters.plPreferDisparity,
1, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
*/
if (clt_parameters.plSplitApply) {
while (true) { while (true) {
int num_added = 0; int num_added = 0;
num_added += st.fillSquares(); num_added += st.fillSquares();
...@@ -3721,8 +3726,6 @@ public class TileProcessor { ...@@ -3721,8 +3726,6 @@ public class TileProcessor {
clt_parameters.stMeasSel); // int stMeasSel) // = 1; // Select measurements for supertiles : +1 - combo, +2 - quad +4 - hor +8 - vert) clt_parameters.stMeasSel); // int stMeasSel) // = 1; // Select measurements for supertiles : +1 - combo, +2 - quad +4 - hor +8 - vert)
TilePlanes.PlaneData[][][] split_planes = // use original (measured planes. See if smoothed are needed here) TilePlanes.PlaneData[][][] split_planes = // use original (measured planes. See if smoothed are needed here)
st.breakPlanesToPairs( st.breakPlanesToPairs(
st.getPlanes(), // Mod(), // final TilePlanes.PlaneData[][] center_planes, // measured_planes, st.getPlanes(), // Mod(), // final TilePlanes.PlaneData[][] center_planes, // measured_planes,
...@@ -3733,7 +3736,7 @@ public class TileProcessor { ...@@ -3733,7 +3736,7 @@ public class TileProcessor {
clt_parameters.plSplitMinQuality, // final double splitMinQuality, // = 1.1; // Minimal split quality to show clt_parameters.plSplitMinQuality, // final double splitMinQuality, // = 1.1; // Minimal split quality to show
clt_parameters.plPreferDisparity, clt_parameters.plPreferDisparity,
1, // final int debugLevel) debugLevel,
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
...@@ -3766,7 +3769,7 @@ public class TileProcessor { ...@@ -3766,7 +3769,7 @@ public class TileProcessor {
clt_parameters.stSmplSide , // final int smplSide, // = 2; // Sample size (side of a square) clt_parameters.stSmplSide , // final int smplSide, // = 2; // Sample size (side of a square)
clt_parameters.stSmplNum , // final int smplNum, // = 3; // Number after removing worst clt_parameters.stSmplNum , // final int smplNum, // = 3; // Number after removing worst
clt_parameters.stSmplRms , // final double smplRms, // = 0.1; // Maximal RMS of the remaining tiles in a sample clt_parameters.stSmplRms , // final double smplRms, // = 0.1; // Maximal RMS of the remaining tiles in a sample
1, // final int debugLevel) debugLevel, // 1, // final int debugLevel)
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
if (debugLevel > -1){ if (debugLevel > -1){
...@@ -3776,21 +3779,21 @@ public class TileProcessor { ...@@ -3776,21 +3779,21 @@ public class TileProcessor {
lp = new LinkPlanes (clt_parameters, st); lp = new LinkPlanes (clt_parameters, st);
lp.matchPlanes( lp.matchPlanes(
st.planes, // final TilePlanes.PlaneData [][] planes, st.planes, // final TilePlanes.PlaneData [][] planes,
2, // -1, // debugLevel, // final int debugLevel) debugLevel, // final int debugLevel)
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
lp.interPlaneCosts( // not used yet, just for testing lp.interPlaneCosts( // not used yet, just for testing
true, // final boolean en_sticks, // allow merging with bad plates true, // final boolean en_sticks, // allow merging with bad plates
st.planes, // final TilePlanes.PlaneData [][] planes, st.planes, // final TilePlanes.PlaneData [][] planes,
2, // -1, // debugLevel, // final int debugLevel) debugLevel, // final int debugLevel)
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
lp.filterNeighborPlanes( lp.filterNeighborPlanes(
st.planes, // final TilePlanes.PlaneData [][] planes, st.planes, // final TilePlanes.PlaneData [][] planes,
true, // final boolean merge_low_eigen, true, // final boolean merge_low_eigen,
2, // -1, // debugLevel, // final int debugLevel) debugLevel, // final int debugLevel)
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
...@@ -3800,7 +3803,7 @@ public class TileProcessor { ...@@ -3800,7 +3803,7 @@ public class TileProcessor {
// false, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks" // false, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks"
true, // final boolean en_sticks, // allow merging with bad plates true, // final boolean en_sticks, // allow merging with bad plates
st.planes, // final TilePlanes.PlaneData [][] planes, st.planes, // final TilePlanes.PlaneData [][] planes,
0, // final int debugLevel) debugLevel, // 0, // final int debugLevel)
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
if (debugLevel>100) System.out.println(quality_stats2.length); if (debugLevel>100) System.out.println(quality_stats2.length);
...@@ -3811,7 +3814,7 @@ public class TileProcessor { ...@@ -3811,7 +3814,7 @@ public class TileProcessor {
lp.setExclusiveLinks( lp.setExclusiveLinks(
st.planes, // final TilePlanes.PlaneData [][] planes, st.planes, // final TilePlanes.PlaneData [][] planes,
lp.getExNeibCost(), // final double max_cost, lp.getExNeibCost(), // final double max_cost,
2, // -1, // debugLevel, // final int debugLevel) debugLevel, // final int debugLevel)
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
...@@ -3833,11 +3836,12 @@ public class TileProcessor { ...@@ -3833,11 +3836,12 @@ public class TileProcessor {
clt_parameters.plNewConfl, // boolean preferDisparity, // Allow more conflicts if overall cost is reduced clt_parameters.plNewConfl, // boolean preferDisparity, // Allow more conflicts if overall cost is reduced
clt_parameters.plMaxChanges, // int maxChanges, // Maximal number of simultaneous connection changes around one tile (0 - any) clt_parameters.plMaxChanges, // int maxChanges, // Maximal number of simultaneous connection changes around one tile (0 - any)
clt_parameters.plPreferDisparity, clt_parameters.plPreferDisparity,
1, // final int debugLevel) debugLevel, // 1, // final int debugLevel)
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
} } // if (clt_parameters.plSplitApply)
while (true) { while (true) {
int num_added = 0; int num_added = 0;
if (clt_parameters.plFillSquares){ if (clt_parameters.plFillSquares){
...@@ -3861,236 +3865,173 @@ public class TileProcessor { ...@@ -3861,236 +3865,173 @@ public class TileProcessor {
// smooth planes (by averaging with neighbors and the "measured" one with variable "pull") // smooth planes (by averaging with neighbors and the "measured" one with variable "pull")
// double relax_for_conflicts = lp.getConflRelax(); // 1.5; // double relax_for_conflicts = lp.getConflRelax(); // 1.5;
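Both paths below (the new planesSmoothAndMerge() and the legacy planesSmooth_old() loop kept for comparison) repeat the same idea: relax each supertile plane toward the average of its linked neighbors while plPull anchors it to the measured plane, stop after plIterations passes or once the largest change drops below 10^-plPrecision, then recompute links and merge. A scalar stand-in (per-supertile disparity instead of full plane data; class and method names are hypothetical), including one reading of the plNormPow comment "0.0: 8 neighbors pull 8 times as 1, 1.0 - same as 1":

	// Hypothetical scalar stand-in for the plane smoothing step. Combined neighbor
	// pull is pow(N, 1 - normPow): normPow=0 lets N neighbors pull N times as hard,
	// normPow=1 makes them pull like a single neighbor.
	public class PlaneSmoothSketch {
		public static double[] smooth(double[] measured,  // measured per-supertile values
		                              int[][] neighbors,  // linked neighbor indices per supertile
		                              double measPull,    // clt_parameters.plPull
		                              int numPasses,      // clt_parameters.plIterations
		                              double normPow,     // clt_parameters.plNormPow
		                              double maxDiff) {   // Math.pow(10, -clt_parameters.plPrecision)
			double[] cur = measured.clone();
			for (int pass = 0; pass < numPasses; pass++) {
				double[] next = cur.clone();
				double worst = 0.0;
				for (int i = 0; i < cur.length; i++) {
					int n = neighbors[i].length;
					if (n == 0) continue;                  // nothing to average with
					double neibSum = 0.0;
					for (int j : neighbors[i]) neibSum += cur[j];
					double neibWeight = Math.pow(n, 1.0 - normPow);
					double neibAvg = neibSum / n;
					next[i] = (measPull * measured[i] + neibWeight * neibAvg) / (measPull + neibWeight);
					worst = Math.max(worst, Math.abs(next[i] - cur[i]));
				}
				cur = next;
				if (worst < maxDiff) break;                // converged to plPrecision digits
			}
			return cur;
		}
	}

After each smoothing pass the surrounding loop recomputes interPlaneCosts() and setExclusiveLinks() on the smoothed planes, collects same-tile merge candidates, applies the merges to the original (measured) planes, and repeats until applyMergePlanes() removes nothing.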
int max_num_tries = 20;
if (clt_parameters.plIterations > 0) { if (clt_parameters.plIterations > 0) {
for (int num_merge_try = 0; num_merge_try < 10; num_merge_try ++ ) { // smooth and merge
st.resetPlanesMod(); // clean start
planes_mod = st.planesSmooth(
lp, // LinkPlanes lp,
clt_parameters.plPull, // final double meas_pull,// relative pull of the original (measured) plane with respect to the average of the neighbors
clt_parameters.plMaxEigen, // final double maxValue, // do not combine with too bad planes
clt_parameters.plIterations, // final int num_passes,
clt_parameters.plStopBad, // Do not update supertile if any of connected neighbors is not good (false: just skip that neighbor)
clt_parameters.plNormPow, // 0.0: 8 neighbors pull 8 times as 1, 1.0 - same as 1
Math.pow(10.0, -clt_parameters.plPrecision), // final double maxDiff, // maximal change in any of the disparity values
clt_parameters.plPreferDisparity,
0, // 1,// 0, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
// create costs for the modified planes
lp.interPlaneCosts(
true, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks"
st.planes_mod, // final TilePlanes.PlaneData [][] planes,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
lp.setExclusiveLinks(
st.planes_mod, // final TilePlanes.PlaneData [][] planes,
// 2.5, //final double max_cost
lp.getExNeibCost()*lp.getExNeibSmooth(), // final double max_cost,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
// once more after updating exclusive links
planes_mod = st.planesSmooth(
lp, // LinkPlanes lp,
clt_parameters.plPull, // final double meas_pull,// relative pull of the original (measured) plane with respect to the average of the neighbors
clt_parameters.plMaxEigen, // final double maxValue, // do not combine with too bad planes
clt_parameters.plIterations, // final int num_passes,
clt_parameters.plStopBad, // Do not update supertile if any of connected neighbors is not good (false: just skip that neighbor)
clt_parameters.plNormPow, // 0.0: 8 neighbors pull 8 times as 1, 1.0 - same as 1
Math.pow(10.0, -clt_parameters.plPrecision), // final double maxDiff, // maximal change in any of the disparity values
clt_parameters.plPreferDisparity,
0, // 1,// 0, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
lp.interPlaneCosts(
true, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks"
st.planes_mod, // final TilePlanes.PlaneData [][] planes,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
// recalculate links? more smooth?
lp.setExclusiveLinks(
st.planes_mod, // final TilePlanes.PlaneData [][] planes,
// 2.5, //final double max_cost
lp.getExNeibCost()*lp.getExNeibSmooth(), // final double max_cost,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
// just in case? Not yet needed
lp.setNonExclusive(
// false, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks"
true, // final boolean en_sticks, // allow merging with bad plates
st.planes_mod, // final TilePlanes.PlaneData [][] planes,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
// see if some modified planes need to be merged (but merge originals)
// TODO: Stricter requirements for merging here than for original planes?
int [][][] merge_candidates = lp.getMergeSameTileCandidates(
st.planes_mod, // final TilePlanes.PlaneData [][] planes,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
boolean [][][] plane_nooverlaps = lp.overlapSameTileCandidates ( if (debugLevel <= 100) {
st.planes_mod, // final TilePlanes.PlaneData [][] planes, st.planes_mod = lp.planesSmoothAndMerge(
merge_candidates, // final int [][][] merge_candidates, st.planes, // final TilePlanes.PlaneData[][] planes, // planes will be modified
0.2, // final double min_distance, max_num_tries, // final int max_num_tries,
2, // -1, // debugLevel, // final int debugLevel) debugLevel); // final int debugLevel)
clt_parameters.tileX, // currently results of below calcStarValueStrength() are not used, just to fill instances fields
clt_parameters.tileY); lp.calcStarValueStrength(
true, // boolean set_start_planes,
// remove merge candidates that break connections to neighbors clt_parameters.plStarOrtho, // orthoWeight, // final double orthoWeight,
if (debugLevel>100) lp.keepSameTileConnections( clt_parameters.plStarDiag, // diagonalWeight, // final double diagonalWeight,
st.planes_mod, // final TilePlanes.PlaneData [][] planes, clt_parameters.plStarPwr, // starPwr, // final double starPwr, // Divide cost by number of connections to this power
merge_candidates, // final int [][][] merge_candidates, clt_parameters.plStarWeightPwr,// starWeightPwr, // final double starWeightPwr, // Use this power of tile weight when calculating connection cost
plane_nooverlaps, // final boolean [][][] valid_candidates, // will be updated clt_parameters.plWeightToDens, // weightToDens, // Balance weighted density against density. 0.0 - density, 1.0 - weighted density
true, // final boolean merge_low_eigen, here it should be true clt_parameters.plStarValPwr, // starValPwr, //double starValPwr, // Raise value of each tile before averaging
true, // final boolean useNonExcl, // consider only directions available for non-exclusive merges 2, // starSteps, // final int steps,
2, // -1, // debugLevel, // final int debugLevel) st.planes, // final TilePlanes.PlaneData [][] planes,
clt_parameters.tileX, clt_parameters.plPreferDisparity, // preferDisparity, // final boolean preferDisparity)
clt_parameters.tileY); 0); // debugLevel);
planes_mod = st.planes_mod; // temporarily
// Consider supertiles with conflicts, merge conflicting layers with relaxed requirements } else { // to be removed after testing the new version
Conflicts iconflicts0 = new Conflicts(st); for (int num_merge_try = 0; num_merge_try < max_num_tries; num_merge_try ++ ) { // smooth and merge
int [][][] conflicts0 = iconflicts0.detectTriangularConflicts( st.resetPlanesMod(); // clean start
1); // final int debugLevel) planes_mod = st.planesSmooth_old(
lp, // LinkPlanes lp,
int [][][] conflicting_candidates = lp.filterPairsByConflicts( clt_parameters.plPull, // final double meas_pull,// relative pull of the original (measured) plane with respect to the average of the neighbors
st.planes_mod, // final TilePlanes.PlaneData [][] planes, clt_parameters.plMaxEigen, // final double maxValue, // do not combine with too bad planes
merge_candidates, // final int [][][] merge_candidates, clt_parameters.plIterations, // final int num_passes,
conflicts0); // final int [][][] conflicts) clt_parameters.plStopBad, // Do not update supertile if any of connected neighbors is not good (false: just skip that neighbor)
clt_parameters.plNormPow, // 0.0: 8 neighbors pull 8 times as 1, 1.0 - same as 1
Math.pow(10.0, -clt_parameters.plPrecision), // final double maxDiff, // maximal change in any of the disparity values
// * Possible problem is that "normalizing" merge quality for low weights is not applicable for "star" plane that includes neighbors clt_parameters.plPreferDisparity,
// * Switch to a single "cost" function (costSameTileConnectionsAlt()) debugLevel, // 0, // 1,// 0, // final int debugLevel)
// Still - how to merge stray tiles that do not have neighbors/star? Still merge them "old way" (costSameTileConnections()) if at least 1 does not clt_parameters.tileX,
// have a "star" clt_parameters.tileY);
lp.costSameTileConnections( // create costs for the modified planes
st.planes_mod, // final TilePlanes.PlaneData [][] planes, lp.interPlaneCosts(
merge_candidates, // final int [][][] merge_candidates, true, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks"
plane_nooverlaps, // final boolean [][][] valid_candidates, // will be updated st.planes_mod, // final TilePlanes.PlaneData [][] planes,
1.0, // final double relax, debugLevel, // final int debugLevel)
2, // -1, // debugLevel, // final int debugLevel) clt_parameters.tileX,
clt_parameters.tileX, clt_parameters.tileY);
clt_parameters.tileY); lp.setExclusiveLinks(
// System.out.println("merge_cost_data.length = " + merge_cost_data.length); st.planes_mod, // final TilePlanes.PlaneData [][] planes,
// 2.5, //final double max_cost
lp.costSameTileConnectionsAlt( lp.getExNeibCost()*lp.getExNeibSmooth(), // final double max_cost,
//5.0, // final double threshold, debugLevel, // final int debugLevel)
//10.0, // final double threshold_nostar, clt_parameters.tileX,
lp.getMergeCostStar(), // relax_for_conflicts * 5.0, // final double threshold, // clt_parameters.tileY);
lp.getMergeCostNoStar(), //relax_for_conflicts * 10.0, // final double threshold_nostar, // once more after updating exclusive links
planes_mod = st.planesSmooth_old(
lp, // LinkPlanes lp,
st.planes_mod, // final TilePlanes.PlaneData [][] planes, clt_parameters.plPull, // final double meas_pull,// relative pull of the original (measured) plane with respect to the average of the neighbors
merge_candidates, // final int [][][] merge_candidates, clt_parameters.plMaxEigen, // final double maxValue, // do not combine with too bad planes
plane_nooverlaps, // final boolean [][][] valid_candidates, // will be updated clt_parameters.plIterations, // final int num_passes,
2, // -1, // debugLevel, // final int debugLevel) clt_parameters.plStopBad, // Do not update supertile if any of connected neighbors is not good (false: just skip that neighbor)
clt_parameters.tileX, clt_parameters.plNormPow, // 0.0: 8 neighbors pull 8 times as 1, 1.0 - same as 1
clt_parameters.tileY); Math.pow(10.0, -clt_parameters.plPrecision), // final double maxDiff, // maximal change in any of the disparity values
clt_parameters.plPreferDisparity,
int [][][] merge_groups = lp.extractMergeSameTileGroups( debugLevel, // 0, // 1,// 0, // final int debugLevel)
st.planes_mod, // final TilePlanes.PlaneData [][] planes, clt_parameters.tileX,
merge_candidates, // final int [][][] merge_candidates, clt_parameters.tileY);
plane_nooverlaps, // boolean [][][] plane_overlaps, lp.interPlaneCosts(
1.0, // final double relax, true, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks"
2, // -1, // debugLevel, // final int debugLevel) st.planes_mod, // final TilePlanes.PlaneData [][] planes,
clt_parameters.tileX, debugLevel, // final int debugLevel)
clt_parameters.tileY); clt_parameters.tileX,
clt_parameters.tileY);
int num_removed_by_merging = st.applyMergePlanes( // recalculate links? more smooth?
st.planes, // final TilePlanes.PlaneData[][] planes, lp.setExclusiveLinks(
merge_groups, // final int [][][] merge_groups, st.planes_mod, // final TilePlanes.PlaneData [][] planes,
// parameters to generate ellipsoids // 2.5, //final double max_cost
0.0, // 3, // final double disp_far, // minimal disparity to select (or NaN) lp.getExNeibCost()*lp.getExNeibSmooth(), // final double max_cost,
Double.NaN, // final double disp_near, // maximal disparity to select (or NaN) debugLevel, // final int debugLevel)
clt_parameters.plDispNorm, // final double dispNorm, // Normalize disparities to the average if above clt_parameters.tileX,
0.0, // final double min_weight, clt_parameters.tileY);
clt_parameters.plMinPoints, // final int min_tiles, // just in case? Not yet needed
// parameters to reduce outliers lp.setNonExclusive(
clt_parameters.plTargetEigen, // final double targetEigen, // = 0.1; // Remove outliers until main axis eigenvalue (possibly scaled by plDispNorm) gets below // false, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks"
clt_parameters.plFractOutliers, // final double fractOutliers, // = 0.3; // Maximal fraction of outliers to remove true, // final boolean en_sticks, // allow merging with bad plates
clt_parameters.plMaxOutliers, // final int maxOutliers, // = 20; // Maximal number of outliers to remove st.planes_mod, // final TilePlanes.PlaneData [][] planes,
2, // -1, // debugLevel, // final int debugLevel) debugLevel, // final int debugLevel)
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
System.out.println("Try "+num_merge_try+ ": removed "+num_removed_by_merging+" planes by merging, recalculating connections");
if (num_removed_by_merging == 0){ // re-calculate all links
// Consider supertiles with conflicts, merge conflicting layers with relaxed requirements
//TODO: Fix the mess to get rid of the plane_nooverlaps // see if some modified planes need to be merged (but merge originals)
// TODO: Stricter requirements for merging here than for original planes?
Conflicts conflicts0_stats = new Conflicts( int [][][] merge_candidates = lp.getMergeSameTileCandidates(
conflicts0, st.planes_mod, // final TilePlanes.PlaneData [][] planes,
st, debugLevel, // final int debugLevel)
-1); // debugLevel); clt_parameters.tileX,
System.out.println("Trying relaxed merging for conflicting plane pairs"); clt_parameters.tileY);
plane_nooverlaps = lp.overlapSameTileCandidates ( boolean [][][] plane_nooverlaps = lp.overlapSameTileCandidates (
st.planes_mod, // final TilePlanes.PlaneData [][] planes, st.planes_mod, // final TilePlanes.PlaneData [][] planes,
conflicting_candidates, // final int [][][] merge_candidates,\ merge_candidates, // final int [][][] merge_candidates,
0.4, // final double min_distance, 0.2, // final double min_distance, //?
2, // -1, // debugLevel, // final int debugLevel) debugLevel, // final int debugLevel)
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
// remove merge candidates that break connections to neighbors // remove merge candidates that break connections to neighbors
if (debugLevel>100) lp.keepSameTileConnections( if (debugLevel>100) lp.keepSameTileConnections(
st.planes_mod, // final TilePlanes.PlaneData [][] planes, st.planes_mod, // final TilePlanes.PlaneData [][] planes,
conflicting_candidates, // final int [][][] merge_candidates, merge_candidates, // final int [][][] merge_candidates,
plane_nooverlaps, // final boolean [][][] valid_candidates, // will be updated plane_nooverlaps, // final boolean [][][] valid_candidates, // will be updated
true, // final boolean merge_low_eigen, here it should be true true, // final boolean merge_low_eigen, here it should be true
true, // final boolean useNonExcl, // consider only directions available for non-exclusive merges true, // final boolean useNonExcl, // consider only directions available for non-exclusive merges
2, // -1, // debugLevel, // final int debugLevel) debugLevel, // final int debugLevel)
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
// Consider supertiles with conflicts, merge conflicting layers with relaxed requirements // Consider supertiles with conflicts, merge conflicting layers with relaxed requirements
//end of TODO: Fix the mess to get rid of the plane_nooverlaps Conflicts iconflicts0 = new Conflicts(st);
int [][][] conflicts0 = iconflicts0.detectTriangularConflicts(
debugLevel); // 1); // final int debugLevel)
int [][][] conflicting_candidates = lp.filterPairsByConflicts(
st.planes_mod, // final TilePlanes.PlaneData [][] planes,
merge_candidates, // final int [][][] merge_candidates,
conflicts0); // final int [][][] conflicts)
// try to merge original (measured) planes, not smoothed ones
// * Possible problem is that "normalizing" merge quality for low weights is not applicable for "star" plane that includes neighbors
// * Switch to a single "cost" function (costSameTileConnectionsAlt())
// Still - how to merge stray tiles that do not have neighbors/star? Still merge them "old way" (costSameTileConnections()) if at least 1 does not
// have a "star"
lp.costSameTileConnections( lp.costSameTileConnections(
// st.planes_mod, // final TilePlanes.PlaneData [][] planes, st.planes_mod, // final TilePlanes.PlaneData [][] planes,
st.planes, // final TilePlanes.PlaneData [][] planes, merge_candidates, // final int [][][] merge_candidates,
conflicting_candidates, // final int [][][] merge_candidates, plane_nooverlaps, // final boolean [][][] valid_candidates, // will be updated
plane_nooverlaps, // final boolean [][][] valid_candidates, // will be updated 1.0, // final double relax,
lp.getConflRelax(), //relax_for_conflicts, // final double relax, debugLevel, // 2, // -1, // debugLevel, // final int debugLevel)
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
// System.out.println("merge_cost_data.length = " + merge_cost_data.length); // System.out.println("merge_cost_data.length = " + merge_cost_data.length);
lp.costSameTileConnectionsAlt( lp.costSameTileConnectionsAlt(
// relax_for_conflicts, // final double relax, //5.0, // final double threshold,
//10.0, // final double threshold_nostar,
lp.getMergeCostStar(), // relax_for_conflicts * 5.0, // final double threshold, //
lp.getMergeCostNoStar(), //relax_for_conflicts * 10.0, // final double threshold_nostar,
lp.getConflRelax() * lp.getMergeCostStar(), // relax_for_conflicts * 5.0, // final double threshold, //
lp.getConflRelax() * lp.getMergeCostNoStar(), //relax_for_conflicts * 10.0, // final double threshold_nostar,
// st.planes_mod, // final TilePlanes.PlaneData [][] planes, st.planes_mod, // final TilePlanes.PlaneData [][] planes,
st.planes, // final TilePlanes.PlaneData [][] planes, merge_candidates, // final int [][][] merge_candidates,
conflicting_candidates, // final int [][][] merge_candidates,
plane_nooverlaps, // final boolean [][][] valid_candidates, // will be updated plane_nooverlaps, // final boolean [][][] valid_candidates, // will be updated
2, // -1, // debugLevel, // final int debugLevel) debugLevel, // final int debugLevel)
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
merge_groups = lp.extractMergeSameTileGroups( int [][][] merge_groups = lp.extractMergeSameTileGroups(
// st.planes_mod, // final TilePlanes.PlaneData [][] planes, st.planes_mod, // final TilePlanes.PlaneData [][] planes,
st.planes, // final TilePlanes.PlaneData [][] planes, merge_candidates, // final int [][][] merge_candidates,
conflicting_candidates, // final int [][][] merge_candidates, plane_nooverlaps, // boolean [][][] plane_overlaps,
plane_nooverlaps, // boolean [][][] plane_overlaps, 1.0, // final double relax,
lp.getConflRelax(), // relax_for_conflicts, // final double relax, debugLevel + 1, // 2, // -1, // debugLevel, // final int debugLevel)
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
num_removed_by_merging = st.applyMergePlanes( int num_removed_by_merging = st.applyMergePlanes(
st.planes, // final TilePlanes.PlaneData[][] planes, st.planes, // final TilePlanes.PlaneData[][] planes,
merge_groups, // final int [][][] merge_groups, merge_groups, // final int [][][] merge_groups,
// parameters to generate ellipsoids // parameters to generate ellipsoids
...@@ -4103,93 +4044,185 @@ public class TileProcessor { ...@@ -4103,93 +4044,185 @@ public class TileProcessor {
clt_parameters.plTargetEigen, // final double targetEigen, // = 0.1; // Remove outliers until main axis eigenvalue (possibly scaled by plDispNorm) gets below clt_parameters.plTargetEigen, // final double targetEigen, // = 0.1; // Remove outliers until main axis eigenvalue (possibly scaled by plDispNorm) gets below
clt_parameters.plFractOutliers, // final double fractOutliers, // = 0.3; // Maximal fraction of outliers to remove clt_parameters.plFractOutliers, // final double fractOutliers, // = 0.3; // Maximal fraction of outliers to remove
clt_parameters.plMaxOutliers, // final int maxOutliers, // = 20; // Maximal number of outliers to remove clt_parameters.plMaxOutliers, // final int maxOutliers, // = 20; // Maximal number of outliers to remove
2, // -1, // debugLevel, // final int debugLevel) debugLevel, // 2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
System.out.println("Try "+num_merge_try+ ": removed "+num_removed_by_merging+" conflicting planes by merging, recalculating connections");
System.out.println("Try "+num_merge_try+ ": removed "+num_removed_by_merging+" planes by merging, recalculating connections");
if (num_removed_by_merging == 0){ // re-calculate all links if (num_removed_by_merging == 0){ // re-calculate all links
break; // Consider supertiles with conflicts, merge conflicting layers with relaxed requirements
//TODO: Fix the mess to get rid of the plane_nooverlaps
Conflicts conflicts0_stats = new Conflicts(
conflicts0,
st,
-1); // debugLevel);
System.out.println("Trying relaxed merging for conflicting plane pairs");
plane_nooverlaps = lp.overlapSameTileCandidates (
st.planes_mod, // final TilePlanes.PlaneData [][] planes,
conflicting_candidates, // final int [][][] merge_candidates,\
0.4, // final double min_distance,
debugLevel, // 2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
// remove merge candidates that break connections to neighbors
if (debugLevel>100) lp.keepSameTileConnections(
st.planes_mod, // final TilePlanes.PlaneData [][] planes,
conflicting_candidates, // final int [][][] merge_candidates,
plane_nooverlaps, // final boolean [][][] valid_candidates, // will be updated
true, // final boolean merge_low_eigen, here it should be true
true, // final boolean useNonExcl, // consider only directions available for non-exclusive merges
debugLevel, // 2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
// Consider supertiles with conflicts, merge conflicting layers with relaxed requirements
//end of TODO: Fix the mess to get rid of the plane_nooverlaps
// try to merge original (measured) planes, not smoothed ones
lp.costSameTileConnections(
// st.planes_mod, // final TilePlanes.PlaneData [][] planes,
st.planes, // final TilePlanes.PlaneData [][] planes,
conflicting_candidates, // final int [][][] merge_candidates,
plane_nooverlaps, // final boolean [][][] valid_candidates, // will be updated
lp.getConflRelax(), //relax_for_conflicts, // final double relax,
debugLevel, // 2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
// System.out.println("merge_cost_data.length = " + merge_cost_data.length);
lp.costSameTileConnectionsAlt(
// relax_for_conflicts, // final double relax,
lp.getConflRelax() * lp.getMergeCostStar(), // relax_for_conflicts * 5.0, // final double threshold, //
lp.getConflRelax() * lp.getMergeCostNoStar(), //relax_for_conflicts * 10.0, // final double threshold_nostar,
// st.planes_mod, // final TilePlanes.PlaneData [][] planes,
st.planes, // final TilePlanes.PlaneData [][] planes,
conflicting_candidates, // final int [][][] merge_candidates,
plane_nooverlaps, // final boolean [][][] valid_candidates, // will be updated
debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
merge_groups = lp.extractMergeSameTileGroups(
// st.planes_mod, // final TilePlanes.PlaneData [][] planes,
st.planes, // final TilePlanes.PlaneData [][] planes,
conflicting_candidates, // final int [][][] merge_candidates,
plane_nooverlaps, // boolean [][][] plane_overlaps,
lp.getConflRelax(), // relax_for_conflicts, // final double relax,
debugLevel+ 1, // 2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
num_removed_by_merging = st.applyMergePlanes(
st.planes, // final TilePlanes.PlaneData[][] planes,
merge_groups, // final int [][][] merge_groups,
// parameters to generate ellipsoids
0.0, // 3, // final double disp_far, // minimal disparity to select (or NaN)
Double.NaN, // final double disp_near, // maximal disparity to select (or NaN)
clt_parameters.plDispNorm, // final double dispNorm, // Normalize disparities to the average if above
0.0, // final double min_weight,
clt_parameters.plMinPoints, // final int min_tiles,
// parameters to reduce outliers
clt_parameters.plTargetEigen, // final double targetEigen, // = 0.1; // Remove outliers until main axis eigenvalue (possibly scaled by plDispNorm) gets below
clt_parameters.plFractOutliers, // final double fractOutliers, // = 0.3; // Maximal fraction of outliers to remove
clt_parameters.plMaxOutliers, // final int maxOutliers, // = 20; // Maximal number of outliers to remove
debugLevel, // 2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
System.out.println("Try "+num_merge_try+ ": removed "+num_removed_by_merging+" conflicting planes by merging, recalculating connections");
if ( num_merge_try >= max_num_tries) {
System.out.println("Exceeded maximal number of iterations, beaking anyway...");
break;
}
if (num_removed_by_merging == 0){ // re-calculate all links
break;
}
} }
}
// Do the same as in conditionSuperTiles before smoothing again // Do the same as in conditionSuperTiles before smoothing again
lp.matchPlanes(
st.planes, // final TilePlanes.PlaneData [][] planes,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
lp.interPlaneCosts( //
true, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks"
st.planes, // final TilePlanes.PlaneData [][] planes,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
lp.filterNeighborPlanes(
st.planes, // final TilePlanes.PlaneData [][] planes,
true, // final boolean merge_low_eigen,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
// calculate it here - use results to keep some planes from merging lp.matchPlanes(
st.planes, // final TilePlanes.PlaneData [][] planes,
debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
double [][] quality_stats2 = lp.selectNeighborPlanesMutual( lp.interPlaneCosts( //
// false, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks" true, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks"
true, // final boolean en_sticks, // allow merging with bad plates st.planes, // final TilePlanes.PlaneData [][] planes,
st.planes, // final TilePlanes.PlaneData [][] planes, debugLevel, // final int debugLevel)
2, // final int debugLevel) clt_parameters.tileX,
clt_parameters.tileX, clt_parameters.tileY);
clt_parameters.tileY);
if (debugLevel>100) System.out.println(quality_stats2.length); lp.filterNeighborPlanes(
st.planes, // final TilePlanes.PlaneData [][] planes,
true, // final boolean merge_low_eigen,
System.out.println("Testing - overwriting selectNeighborPlanesMutual() results with setExclusiveLinks()"); debugLevel, // final int debugLevel)
clt_parameters.tileX,
// Just overwrite results of the previous method clt_parameters.tileY);
lp.setExclusiveLinks(
st.planes, // final TilePlanes.PlaneData [][] planes, // calculate it here - use results to keep some planes from merging
// 2.5, //final double max_cost
lp.getExNeibCost()*lp.getExNeibSmooth(), // final double max_cost,
2, // -1, // debugLevel, // final int debugLevel)
clt_parameters.tileX, /*
clt_parameters.tileY); double [][] quality_stats2 = lp.selectNeighborPlanesMutual(
// false, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks"
true, // final boolean en_sticks, // allow merging with bad plates
lp.setNonExclusive( st.planes, // final TilePlanes.PlaneData [][] planes,
// false, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks" debugLevel, // 2, // final int debugLevel)
true, // final boolean en_sticks, // allow merging with bad plates clt_parameters.tileX,
st.planes, // final TilePlanes.PlaneData [][] planes, clt_parameters.tileY);
2, // -1, // debugLevel, // final int debugLevel) if (debugLevel>100) System.out.println(quality_stats2.length);
clt_parameters.tileX,
clt_parameters.tileY);
System.out.println("Testing - overwriting selectNeighborPlanesMutual() results with setExclusiveLinks()");
lp.calcStarValueStrength( */
true, // boolean set_start_planes, // Just overwrite results of the previous method
clt_parameters.plStarOrtho, // orthoWeight, // final double orthoWeight, lp.setExclusiveLinks(
clt_parameters.plStarDiag, // diagonalWeight, // final double diagonalWeight, st.planes, // final TilePlanes.PlaneData [][] planes,
clt_parameters.plStarPwr, // starPwr, // final double starPwr, // Divide cost by number of connections to this power // 2.5, //final double max_cost
clt_parameters.plStarWeightPwr,// starWeightPwr, // final double starWeightPwr, // Use this power of tile weight when calculating connection cost lp.getExNeibCost()*lp.getExNeibSmooth(), // final double max_cost,
clt_parameters.plWeightToDens, // weightToDens, // Balance weighted density against density. 0.0 - density, 1.0 - weighted density debugLevel, // final int debugLevel)
clt_parameters.plStarValPwr, // starValPwr, //double starValPwr, // Raise value of each tile before averaging clt_parameters.tileX,
2, // starSteps, // final int steps, clt_parameters.tileY);
st.planes, // final TilePlanes.PlaneData [][] planes,
clt_parameters.plPreferDisparity, // preferDisparity, // final boolean preferDisparity)
0); // debugLevel); lp.setNonExclusive(
// false, // final boolean en_sticks, // treat planes with second eigenvalue below plEigenStick as "sticks"
// end of possible merge, can try smoothing again true, // final boolean en_sticks, // allow merging with bad plates
st.planes, // final TilePlanes.PlaneData [][] planes,
debugLevel, // final int debugLevel)
clt_parameters.tileX,
clt_parameters.tileY);
lp.calcStarValueStrength(
true, // boolean set_start_planes,
clt_parameters.plStarOrtho, // orthoWeight, // final double orthoWeight,
clt_parameters.plStarDiag, // diagonalWeight, // final double diagonalWeight,
clt_parameters.plStarPwr, // starPwr, // final double starPwr, // Divide cost by number of connections to this power
clt_parameters.plStarWeightPwr,// starWeightPwr, // final double starWeightPwr, // Use this power of tile weight when calculating connection cost
clt_parameters.plWeightToDens, // weightToDens, // Balance weighted density against density. 0.0 - density, 1.0 - weighted density
clt_parameters.plStarValPwr, // starValPwr, //double starValPwr, // Raise value of each tile before averaging
2, // starSteps, // final int steps,
st.planes, // final TilePlanes.PlaneData [][] planes,
clt_parameters.plPreferDisparity, // preferDisparity, // final boolean preferDisparity)
0); // debugLevel);
// end of possible merge, can try smoothing again
// just to show them, not currently processed
/* // just to show them, not currently processed
* will not work - hard-wired to use planes, not planes_mod!
/*
* will not work - hard-wired to use planes, not planes_mod!
st.resolveConflicts( st.resolveConflicts(
lp, // LinkPlanes lp, lp, // LinkPlanes lp,
clt_parameters.plMaxEigen, clt_parameters.plMaxEigen,
...@@ -4211,20 +4244,14 @@ public class TileProcessor { ...@@ -4211,20 +4244,14 @@ public class TileProcessor {
1, // final int debugLevel) 1, // final int debugLevel)
clt_parameters.tileX, clt_parameters.tileX,
clt_parameters.tileY); clt_parameters.tileY);
*/ */
}
} else {
}
} // if (debugLevel > 100) {
} else { //if (clt_parameters.plIterations > 0)
st.planes_mod = st.planes; // just use the measured ones st.planes_mod = st.planes; // just use the measured ones
} }
// filter out weak planes, create boolean array [per-supertile][per disparity plane] // filter out weak planes, create boolean array [per-supertile][per disparity plane]
......
...@@ -416,7 +416,8 @@ public class TileSurface { ...@@ -416,7 +416,8 @@ public class TileSurface {
final int nStiles = stilesX * stilesY; final int nStiles = stilesX * stilesY;
final int nTiles = nStiles * superTileSize * superTileSize; final int nTiles = nStiles * superTileSize * superTileSize;
final double [][][][] fused_data = new double [nStiles][][][]; final double [][][][] fused_data = new double [nStiles][][][];
final Thread[] threads = ImageDtt.newThreadArray(threadsMax); // final Thread[] threads = ImageDtt.newThreadArray(threadsMax);
final Thread[] threads = ImageDtt.newThreadArray((debugLevel > 1)? 1 : threadsMax);
final AtomicInteger ai = new AtomicInteger(0); final AtomicInteger ai = new AtomicInteger(0);
final int dbg_tile = dbg_Y * stilesX + dbg_X; final int dbg_tile = dbg_Y * stilesX + dbg_X;
for (int ithread = 0; ithread < threads.length; ithread++) { for (int ithread = 0; ithread < threads.length; ithread++) {
...@@ -424,7 +425,8 @@ public class TileSurface { ...@@ -424,7 +425,8 @@ public class TileSurface {
public void run() { public void run() {
for (int nsTile = ai.getAndIncrement(); nsTile < nStiles; nsTile = ai.getAndIncrement()) { for (int nsTile = ai.getAndIncrement(); nsTile < nStiles; nsTile = ai.getAndIncrement()) {
if (planes[nsTile] != null) { if (planes[nsTile] != null) {
int dl = ((debugLevel > -1) && (nsTile == dbg_tile)) ? 3:0; // int dl = ((debugLevel > -1) && (nsTile == dbg_tile)) ? 3:0;
int dl = ((debugLevel > 1) && (nsTile == dbg_tile)) ? 3: debugLevel;
if (dl > 0){ if (dl > 0){
System.out.println("fuseSupertilePlanes(), nsTile = "+nsTile); System.out.println("fuseSupertilePlanes(), nsTile = "+nsTile);
} }
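The two TileSurface changes in this hunk follow a common debug pattern: serialize the thread pool when debugLevel is high so console printouts stay ordered, and raise the per-tile debug level only for the tile selected by dbg_tile. A minimal sketch of the pattern (hypothetical class, mirroring the expressions used above):

	// Hypothetical sketch of the debug gating used in fuseSupertilePlanes().
	public class DebugGateSketch {
		public static int threadCount(int debugLevel, int threadsMax) {
			return (debugLevel > 1) ? 1 : threadsMax;                     // single thread when debugging
		}
		public static int tileDebugLevel(int debugLevel, int nsTile, int dbgTile) {
			return ((debugLevel > 1) && (nsTile == dbgTile)) ? 3 : debugLevel;
		}
	}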
...@@ -495,7 +497,7 @@ public class TileSurface { ...@@ -495,7 +497,7 @@ public class TileSurface {
} }
} }
fused_data[nsTile] = disp_strength; fused_data[nsTile] = disp_strength;
if ((debugLevel > -1) && (dl>0)){ if ((debugLevel > -1) && (dl > 0)){
String[] titles = new String [3 * disp_strength.length]; String[] titles = new String [3 * disp_strength.length];
double [][] dbg_img = new double [titles.length][]; double [][] dbg_img = new double [titles.length][];
for (int i = 0; i < disp_strength.length; i++) { for (int i = 0; i < disp_strength.length; i++) {
......