Commit 857dfbb8 authored by Andrey Filippov's avatar Andrey Filippov

Dealing with low-textured areas with dual quad camera

parent 83946ca3
/**
** BiCamDSI - Building DSI using correlation between two quad cameras
**
** Copyright (C) 2018 Elphel, Inc.
**
** -----------------------------------------------------------------------------**
**
** TwoQuadCLT.java is free software: you can redistribute it and/or modify
** it under the terms of the GNU General Public License as published by
** the Free Software Foundation, either version 3 of the License, or
** (at your option) any later version.
**
** This program is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
** GNU General Public License for more details.
**
** You should have received a copy of the GNU General Public License
** along with this program. If not, see <http://www.gnu.org/licenses/>.
** -----------------------------------------------------------------------------**
**
*/
import java.util.ArrayList;
public class BiCamDSI {
// public int tilesX;
// public int tilesY;
TileNeibs tnImage; // = new TileNeibs(tilesX, tilesY)
/**
 * Create the helper for building a DSI from dual-quad-camera correlation.
 * Only sets up the tile-neighbor navigator used by all methods of this class.
 * @param tilesX number of tile columns in the image
 * @param tilesY number of tile rows in the image
 */
public BiCamDSI(
		int tilesX,
		int tilesY) {
	this.tnImage = new TileNeibs(tilesX, tilesY);
}
/**
 * Find "trusted" tiles of the inter-camera (cross) disparity measurement.
 * A tile whose cross-correlation strength is at least trusted_strength is trusted
 * unconditionally. A weaker tile (strength >= trusted_strength*need_friends) becomes
 * trusted when supported by neighbors with similar disparity: either min_friends_any
 * conditionally-trusted neighbors, or min_friends_trusted already-trusted ones.
 * Passes repeat until a pass promotes no new tiles, so trust can propagate through
 * chains of conditionally-trusted tiles.
 *
 * @param disparity_bimap     measured data, indexed by the ImageDtt.BI_* layer constants
 * @param min_disparity       NOTE(review): declared but not used in this method body
 * @param trusted_strength    strength sufficient without neighbors
 * @param need_friends        fraction of trusted_strength sufficient with neighbor support
 * @param friends_diff        absolute disparity difference to count a neighbor as a friend
 * @param friends_rdiff       additional allowed difference per pixel of disparity
 * @param min_friends_any     minimal number of (even weak) friends to promote a tile
 * @param min_friends_trusted minimal number of trusted friends to promote a tile
 * @param friends_dist        radius (in tiles) of the square neighborhood searched
 * @param debugLevel          debug level (NOTE(review): the per-pass printout below is not gated by it)
 * @return per-tile array of "trusted" flags
 */
public boolean [] getLTTrusted(
double [][] disparity_bimap,
double min_disparity, // = 0.0; // apply low texture to near objects
double trusted_strength, // = 0.2; // strength sufficient without neighbors
double need_friends, // = 0.4; // strength sufficient with neighbors support, fraction of lt_trusted_strength
double friends_diff, // = 0.15; // pix difference to neighbors to be considered a match (TODO: use tilted)
double friends_rdiff, // = 0.04; // additional relative pix per pixel of disparity
int min_friends_any, // = 2; // minimal number of even weak friends
int min_friends_trusted, // = 2; // minimal number of trusted (strong or already confirmed)
int friends_dist, // = 3; // how far to look for friends
int debugLevel
) {
// minimal strength to be even conditionally trusted (i.e. with neighbor support)
final double cond_strength = trusted_strength * need_friends;
final double [] disparity = disparity_bimap[ImageDtt.BI_TARGET_INDEX];
final double [] strength = disparity_bimap[ImageDtt.BI_STR_CROSS_INDEX];
final boolean [] trusted = new boolean[strength.length];
final boolean [] cond_trusted = trusted.clone();
// seed: strong tiles are trusted outright, weaker-but-sufficient ones conditionally
for (int nTile = 0; nTile < trusted.length; nTile ++) if (strength[nTile] >= cond_strength){
cond_trusted[nTile] = true;
trusted[nTile] = strength[nTile] >= trusted_strength;
}
// repeat passes until no tile is promoted; new_tiles counts promotions per pass and
// is reset to 0 by the for-update expression at the start of each subsequent pass
for (int new_tiles = 0; ; new_tiles = 0) {
for (int nTile = 0; nTile < trusted.length; nTile ++) {
if (cond_trusted[nTile] && !trusted[nTile]) {
int num_trusted = 0;
int num_friends = 0;
// disparity range a neighbor must fall into to count as a "friend"
double low_friend = disparity[nTile] - friends_diff - friends_rdiff*disparity[nTile];
double high_friend = disparity[nTile] + friends_diff + friends_rdiff*disparity[nTile];
label_tile:
{
for (int dy = -friends_dist; dy <= friends_dist; dy++) {
for (int dx = -friends_dist; dx <= friends_dist; dx++) if ((dy!=0) ||(dx !=0)){
int nTile1 = tnImage.getNeibIndex(nTile, dx, dy);
// NaN disparity fails both range comparisons, so NaN neighbors are skipped
if ((nTile1 >= 0) && (disparity[nTile1] >= low_friend) && (disparity[nTile1] <= high_friend)){ // disparity[nTile1] may be NaN!
if (cond_trusted[nTile1]) {
num_friends++;
if (num_friends >= min_friends_any){
trusted[nTile] = true;
new_tiles++;
break label_tile;
// NOTE(review): num_trusted is only counted while num_friends < min_friends_any,
// because this branch is the "else" of the num_friends test — confirm intended
} else if (trusted[nTile1]) {
num_trusted++;
if (num_trusted >= min_friends_trusted){
trusted[nTile] = true;
new_tiles++;
break label_tile;
}
}
}
}
}
}
}
}
}
// NOTE(review): unconditional debug printout (not gated by debugLevel)
System.out.println("new tiles = "+new_tiles); // find out why second pass always returns 0
if (new_tiles == 0) break;
}
return trusted;
}
/**
 * Invalidate low-texture tiles that are not trusted: the target disparity of every
 * untrusted tile is replaced with NaN so those tiles are excluded from further use.
 * Trust is determined by getLTTrusted() called with the same parameters.
 *
 * @return the number of tiles that remained trusted
 */
public int removeLTUntrusted(
		double [][] disparity_bimap,
		double min_disparity, // = 0.0; // apply low texture to near objects
		double trusted_strength, // = 0.2; // strength sufficient without neighbors
		double need_friends, // = 0.4; // strength sufficient with neighbors support, fraction of lt_trusted_strength
		double friends_diff, // = 0.15; // pix difference to neighbors to be considered a match (TODO: use tilted)
		double friends_rdiff, // = 0.04; // additional relative pix per pixel of disparity
		int min_friends_any, // = 2; // minimal number of even weak friends
		int min_friends_trusted, // = 2; // minimal number of trusted (strong or already confirmed)
		int friends_dist, // = 3; // how far to look for friends
		int debugLevel
		) {
	final boolean [] trusted = getLTTrusted(
			disparity_bimap,
			min_disparity,
			trusted_strength,
			need_friends,
			friends_diff,
			friends_rdiff,
			min_friends_any,
			min_friends_trusted,
			friends_dist,
			debugLevel);
	final double [] target_disparity = disparity_bimap[ImageDtt.BI_TARGET_INDEX];
	int num_trusted = 0;
	for (int nTile = trusted.length - 1; nTile >= 0; nTile--) {
		if (trusted[nTile]) {
			num_trusted++;
		} else {
			target_disparity[nTile] = Double.NaN; // invalidate untrusted tile
		}
	}
	return num_trusted;
}
/**
 * Suggest disparity values to measure for tiles adjacent to already known (trusted)
 * ones, to extend measured data into low-textured areas.
 * Candidate tiles are untrusted tiles with at least one trusted neighbor within
 * extend_dist. For each candidate a strength- and distance-weighted (Gaussian,
 * sigma = wsigma) average of the trusted neighbors' disparities is calculated.
 * If the weighted variance exceeds the allowed limit (asigma plus rsigma per pixel
 * of disparity, squared), outlier neighbors are removed one at a time, then the
 * closest of the removed ones are added back while the variance stays under the
 * limit (an attempt to handle bimodal neighborhoods), and the last added one is
 * subtracted again.
 *
 * @param disparity_bimap  measured data, indexed by the ImageDtt.BI_* layer constants;
 *                         debug layers are modified when debugLevel > -2
 * @param trusted          per-tile trusted flags; may be null if untrusted tiles
 *                         already have NaN disparity (then all non-NaN tiles are used)
 * @param min_disparity    NOTE(review): declared but not used in this method body
 * @param trusted_strength strength sufficient without neighbors
 * @param need_friends     fraction of trusted_strength sufficient with neighbor support
 * @param extend_dist      how far (in tiles) to extend around known tiles
 * @param wsigma           influence of far neighbors diminishes as a Gaussian with this sigma
 * @param max_asigma       maximal acceptable standard deviation of the neighbors
 * @param max_rsigma       additional standard deviation per pixel of disparity
 * @param debugLevel       debug level (printouts and debug layers when > -2)
 * @return per-tile array of disparity values to measure (NaN for tiles to skip)
 */
public double [] suggestLTTiles(
double [][] disparity_bimap,
boolean [] trusted, // may be null if disparity is already NaN-ed
double min_disparity, // = 0.0; // apply low texture to near objects
double trusted_strength, // = 0.2; // strength sufficient without neighbors
double need_friends, // = 0.4; // strength sufficient with neighbors support, fraction of lt_trusted_strength
// double friends_diff, // = 0.15; // pix difference to neighbors to be considered a match (TODO: use tilted)
// double friends_rdiff, // = 0.04; // additional relative pix per pixel of disparity
// int min_friends_any, // = 2; // minimal number of even weak friends
// int min_friends_trusted, // = 2; // minimal number of trusted (strong or already confirmed)
// int friends_dist, // = 3; // how far to look for friends
// boolean replace_lone, // = true; // try to overwrite lone weak
int extend_dist, // = 3; // how far to extend around known tiles (probably should increase this value up to?
// dealing with neighbors variance
double wsigma, // = 1.0; // influence of far neighbors diminish as a Gaussian with this sigma
double max_asigma, // = .15; // Maximal acceptable standard deviation of the neighbors (remove, then add)
double max_rsigma, // = .05; // Maximal acceptable standard deviation of the neighbors (remove, then add)
int debugLevel
) {
final double rsigma = max_rsigma; //pix/pix
final double asigma = max_asigma; // 1.0;
// distance-based weight table, indexed by |dy|, |dx|
final double [][] weights = new double [extend_dist+1][extend_dist+1];
final double cond_strength = trusted_strength * need_friends;
// NOTE(review): strength at or below strength_floor makes a sample weight non-positive;
// trusted tiles from getLTTrusted() have strength >= cond_strength, but the
// trusted==null branch below only checks for NaN disparity — confirm
final double strength_floor = 0.7*cond_strength;
for (int i = 0; i <weights.length; i++) {
for (int j = i; j <weights[i].length; j++) {
weights[i][j]=Math.exp(-(i*i+j*j)/(2*wsigma*wsigma));
weights[j][i] = weights[i][j];
}
}
final double [] disparity = disparity_bimap[ImageDtt.BI_TARGET_INDEX];
final double [] strength = disparity_bimap[ImageDtt.BI_STR_CROSS_INDEX];
if (trusted == null) {
// no flags provided — trust every tile that has a (non-NaN) disparity
trusted = new boolean[disparity.length];
for (int nTile = 0; nTile < trusted.length; nTile++) trusted[nTile] = !Double.isNaN(disparity[nTile]);
}
// final boolean [] trusted = new boolean[strength.length];
// final boolean [] cond_trusted = trusted.clone();
// for (int nTile = 0; nTile < trusted.length; nTile ++) if (strength[nTile] >= cond_strength){
// cond_trusted[nTile] = true;
// trusted[nTile] = strength[nTile] >= trusted_strength;
// }
double sigma = asigma;
double sigma2 = sigma*sigma;
// result: disparity to measure per tile, NaN where nothing is suggested
final double [] to_measure = new double [disparity.length];
for (int nTile = 0; nTile < to_measure.length; nTile++) {
to_measure[nTile] = Double.NaN;
}
/*
for (int new_tiles = 0; ; new_tiles = 0) {
for (int nTile = 0; nTile < trusted.length; nTile ++) {
if (cond_trusted[nTile] && !trusted[nTile]) {
int num_trusted = 0;
int num_friends = 0;
double low_friend = disparity[nTile] - friends_diff - friends_rdiff*disparity[nTile];
double high_friend = disparity[nTile] + friends_diff + friends_rdiff*disparity[nTile];
label_tile:
{
for (int dy = -friends_dist; dy <= friends_dist; dy++) {
for (int dx = -friends_dist; dx <= friends_dist; dx++) if ((dy!=0) ||(dx !=0)){
int nTile1 = tnImage.getNeibIndex(nTile, dx, dy);
if ((nTile1 >= 0) && (disparity[nTile1] >= low_friend) && (disparity[nTile1] <= high_friend)){
if (cond_trusted[nTile1]) {
num_friends++;
if (num_friends >= min_friends_any){
trusted[nTile] = true;
new_tiles++;
break label_tile;
} else if (trusted[nTile1]) {
num_trusted++;
if (num_trusted >= min_friends_trusted){
trusted[nTile] = true;
new_tiles++;
break label_tile;
}
}
}
}
}
}
}
}
}
System.out.println("new tiles = "+new_tiles);
if (new_tiles == 0) break;
}
boolean [] trusted = getLTTrusted(
disparity_bimap, // double [][] disparity_bimap,
min_disparity, //double min_disparity, // = 0.0; // apply low texture to near objects
trusted_strength, //double trusted_strength, // = 0.2; // strength sufficient without neighbors
need_friends, //double need_friends, // = 0.4; // strength sufficient with neighbors support, fraction of lt_trusted_strength
friends_diff, //double friends_diff, // = 0.15; // pix difference to neighbors to be considered a match (TODO: use tilted)
friends_rdiff, //double friends_rdiff, // = 0.04; // additional relative pix per pixel of disparity
min_friends_any, //int min_friends_any, // = 2; // minimal number of even weak friends
min_friends_trusted, //int min_friends_trusted, // = 2; // minimal number of trusted (strong or already confirmed)
friends_dist, //int friends_dist, // = 3; // how far to look for friends
debugLevel); //int debugLevel
final boolean [] dbg_trusted = trusted.clone();
*/
// mark candidates: untrusted tiles with at least one trusted neighbor
// inside the (2*extend_dist+1) square
final boolean [] candidates = new boolean[strength.length];
// can be done faster if jump on empty by square side
for (int nTile = 0; nTile < candidates.length; nTile++) if (!trusted[nTile]){
label_tile1:
for (int dy = -extend_dist; dy <= extend_dist; dy++) {
for (int dx = -extend_dist; dx <= extend_dist; dx++) if ((dy!=0) ||(dx !=0)){
int nTile1 = tnImage.getNeibIndex(nTile, dx, dy);
if ((nTile1 >= 0) && trusted[nTile1]){
candidates[nTile] = true;
break label_tile1;
}
}
}
}
/*
if (debugLevel > -2) {
for (int nTile = 0; nTile < trusted.length; nTile ++) {
disparity_bimap[ImageDtt.BI_DBG1_INDEX][nTile] = Double.NaN;
disparity_bimap[ImageDtt.BI_DBG2_INDEX][nTile] = Double.NaN;
disparity_bimap[ImageDtt.BI_DBG3_INDEX][nTile] = Double.NaN;
disparity_bimap[ImageDtt.BI_DBG4_INDEX][nTile] = Double.NaN;
if (trusted[nTile]) {
disparity_bimap[ImageDtt.BI_DBG1_INDEX][nTile] = disparity[nTile];
disparity_bimap[ImageDtt.BI_DBG2_INDEX][nTile] = disparity[nTile];
disparity_bimap[ImageDtt.BI_DBG4_INDEX][nTile] = 2.0;
// if (dbg_trusted[nTile])disparity_bimap[ImageDtt.BI_DBG4_INDEX][nTile] = 3.0;
// } else if (cond_trusted[nTile]) {
// disparity_bimap[ImageDtt.BI_DBG4_INDEX][nTile] = 1.0;
}
}
for (int nTile = 0; nTile < trusted.length; nTile ++) {
if (candidates[nTile]) disparity_bimap[ImageDtt.BI_DBG4_INDEX][nTile] = 0.0;
}
(new showDoubleFloatArrays()).showArrays(
disparity_bimap,
tnImage.sizeX,
tnImage.sizeY,
true,
"BiCamDSI",
ImageDtt.BIDISPARITY_TITLES);
}
*/
int num_sigma = 0; // number of candidate tiles that required outlier removal
for (int nTile = 0; nTile < candidates.length; nTile++) if (candidates[nTile]){
ArrayList<Integer> sample_list = new ArrayList<Integer>();
// weighted accumulators: sum of weights, weighted disparity, weighted disparity^2
double s0 = 0.0, s1 = 0.0, s2 = 0.0;
for (int dy = -extend_dist; dy <= extend_dist; dy++) {
for (int dx = -extend_dist; dx <= extend_dist; dx++) if ((dy!=0) ||(dx !=0)){
int nTile1 = tnImage.getNeibIndex(nTile, dx, dy);
if ((nTile1 >= 0) && trusted[nTile1]){
// weight combines (strength above floor) and Gaussian distance falloff
double w = (strength[nTile1] - strength_floor) * weights[(dy>0)?dy:-dy][(dx>0)?dx:-dx];
s0 += w;
s1 += w * disparity[nTile1];
s2 += w * disparity[nTile1] * disparity[nTile1];
sample_list.add(nTile1);
}
}
}
if (s0 <= 0) {
System.out.println("suggestLTTiles() BUG? nTile = "+nTile+" s0 ="+s0);
continue;
}
double s_mean = s1/s0;
double smpl_var = s2/s0 -s_mean*s_mean;
// if (debugLevel > -2) {
// disparity_bimap[ImageDtt.BI_DBG2_INDEX][nTile] = s_mean;
// disparity_bimap[ImageDtt.BI_DBG3_INDEX][nTile] = Math.sqrt(smpl_var);
// }
// final double rsigma = 0.05; //pix/pix
// final double asigma = max_sigma; // 1.0;
// allowed sigma grows with disparity (rsigma pixels per pixel of disparity)
sigma = asigma + rsigma * s_mean;
sigma2 = sigma*sigma;
// FIXME: use tilted planes
if (smpl_var > sigma2) {
if (debugLevel > -2) {
// System.out.print ((nTile%tnImage.sizeX)+"/"+(nTile/tnImage.sizeX)+": s_mean = "+s_mean+", smpl_var = "+smpl_var+" ... "+ " ntiles="+(sample_list.size()));
System.out.print (String.format("%3d/%3d mean=%8f sigma2=%f var=%8f tiles=%3d ",nTile%tnImage.sizeX, nTile/tnImage.sizeX, s_mean, sigma2, smpl_var, sample_list.size()));
}
num_sigma++;
ArrayList<Integer> rejected_list = new ArrayList<Integer>();
int [] xy0 = tnImage.getXY(nTile);
// remove the worst outlier until the weighted variance fits within the limit
while (smpl_var > sigma2) {
// find worst sample
int worst_indx = -1;
double worst_diff = 0;
double d;
for (int i = 0; i < sample_list.size(); i++) {
int nTile1 = sample_list.get(i);
d = Math.abs(disparity[nTile1] - s_mean);
if (d > worst_diff) {
worst_diff = d;
worst_indx = i;
}
}
// remove worst sample, add to reject list
int nTile1 = sample_list.get(worst_indx);
rejected_list.add(nTile1);
sample_list.remove(worst_indx);
// recalculate statistics (subtract the removed sample's contribution)
int [] xy = tnImage.getXY(nTile1);
int dx =xy[0] - xy0[0];
int dy =xy[1] - xy0[1];
double w = (strength[nTile1] - strength_floor) * weights[(dy>0)?dy:-dy][(dx>0)?dx:-dx];
s0 -= w;
s1 -= w * disparity[nTile1];
s2 -= w * disparity[nTile1] * disparity[nTile1];
s_mean = s1/s0;
smpl_var = s2/s0 -s_mean*s_mean;
}
if (debugLevel > -2) {
// System.out.print (" -> s_mean = "+s_mean+", smpl_var = "+smpl_var+" ... "+ " ntiles="+(sample_list.size()));
System.out.print (String.format(" (-)-> mean=%8f var=%8f tiles=%3d ",s_mean, smpl_var, sample_list.size()));
}
// Try to add best of the rejected back (trying to deal with 2-maximums histogram)
// (restoring every rejected sample would return the variance to its original
// value > sigma2, so this loop exits before rejected_list can become empty)
while (smpl_var < sigma2) { // then remove last added
// find best rejected sample
int best_indx = -1;
double best_diff = 0;
double d;
for (int i = 0; i < rejected_list.size(); i++) {
int nTile1 = rejected_list.get(i);
d = Math.abs(disparity[nTile1] - s_mean);
if ((best_indx <0) || (d < best_diff)) {
best_diff = d;
best_indx = i;
}
}
// restore best rejected sample
int nTile1 = rejected_list.remove(best_indx);
sample_list.add(nTile1); // best_indx); // will be last - easy to remove
// recalculate statistics (add the restored sample's contribution back)
int [] xy = tnImage.getXY(nTile1);
int dx =xy[0] - xy0[0];
int dy =xy[1] - xy0[1];
double w = (strength[nTile1] - strength_floor) * weights[(dy>0)?dy:-dy][(dx>0)?dx:-dx];
s0 += w;
s1 += w * disparity[nTile1];
s2 += w * disparity[nTile1] * disparity[nTile1];
s_mean = s1/s0;
smpl_var = s2/s0 -s_mean*s_mean;
}
if (debugLevel > -2) {
System.out.print (String.format(" (+)-> mean=%8f var=%8f tiles=%3d ",s_mean, smpl_var, sample_list.size()));
}
// remove last added sample
// remove worst sample, add to reject list
// NOTE(review): if the add-back loop above ran zero iterations (smpl_var exactly
// equal to sigma2), this subtracts a sample that was never re-added — confirm
int nTile1 = sample_list.get(sample_list.size()-1); // last added, no need to actually remove
// recalculate statistics (subtract the last added sample's contribution)
int [] xy = tnImage.getXY(nTile1);
int dx =xy[0] - xy0[0];
int dy =xy[1] - xy0[1];
double w = (strength[nTile1] - strength_floor) * weights[(dy>0)?dy:-dy][(dx>0)?dx:-dx];
s0 -= w;
s1 -= w * disparity[nTile1];
s2 -= w * disparity[nTile1] * disparity[nTile1];
s_mean = s1/s0;
smpl_var = s2/s0 -s_mean*s_mean;
if (debugLevel > -2) {
// System.out.println (" s_mean = "+s_mean+", smpl_var = "+smpl_var+ " ntiles="+(sample_list.size()-1) );
System.out.println (String.format(" => mean=%8f var=%8f tiles=%3d ", s_mean, smpl_var, sample_list.size()-1));
}
} // if (smpl_var > sigma2) {
to_measure[nTile] = s_mean; // suggest the weighted mean disparity for measurement
if (debugLevel > -2) {
disparity_bimap[ImageDtt.BI_DBG2_INDEX][nTile] = s_mean;
disparity_bimap[ImageDtt.BI_DBG3_INDEX][nTile] = Math.sqrt(smpl_var);
}
}
if (debugLevel > -2) {
int num_to_measure =0;
for (int nTile = 0; nTile < to_measure.length; nTile++) {
if (!Double.isNaN(to_measure[nTile]))num_to_measure++;
}
// sigma/sigma2 here hold the values calculated for the last processed candidate tile
System.out.println ("Updated sigma in tiles:"+num_sigma+" (sigma = "+sigma+", sigma2 = "+sigma2);
System.out.println ("Tiles to meaure:"+num_to_measure);
disparity_bimap[ImageDtt.BI_DBG3_INDEX] = to_measure; // overwrites old debug data
(new showDoubleFloatArrays()).showArrays(
disparity_bimap,
tnImage.sizeX,
tnImage.sizeY,
true,
"BiCamDSI-2",
ImageDtt.BIDISPARITY_TITLES);
}
return to_measure;
}
/*
* sigma2
static int BI_DISP_FULL_INDEX = 0; // 0 - disparity for all directions of the main camera
static int BI_DISP_HOR_INDEX = 1; // 1 - disparity for 2 horizontal pairs of the main camera
static int BI_DISP_VERT_INDEX = 2; // 2 - disparity for 2 vertical pairs of the main camera
static int BI_DISP_DIAGM_INDEX = 3; // 3 - disparity for main diagonal pair of the main camera
static int BI_DISP_DIAGO_INDEX = 4; // 4 - disparity for main diagonal pair of the main camera
static int BI_ADISP_FULL_INDEX = 5; // 5 - disparity for all directions of the aux camera
static int BI_ADISP_HOR_INDEX = 6; // 6 - disparity for 2 horizontal pairs of the aux camera
static int BI_ADISP_VERT_INDEX = 7; // 7 - disparity for 2 vertical pairs of the aux camera
static int BI_ADISP_DIAGM_INDEX = 8; // 8 - disparity for main diagonal pair of the aux camera
static int BI_ADISP_DIAGO_INDEX = 9; // 9 - disparity for main diagonal pair of the aux camera
static int BI_DISP_CROSS_INDEX = 10; //10 - disparity between the main the aux camera
static int BI_DISP_CROSS_DX_INDEX = 11; //11 - delta disparity between the main the aux camera (horizontal)
static int BI_DISP_CROSS_DY_INDEX = 12; //12 - delta disparity between the main the aux camera (vertical)
static int BI_STR_FULL_INDEX = 13; //13 - strength for all directions of the main camera
static int BI_STR_HOR_INDEX = 14; //14 - strength for 2 horizontal pairs of the main camera
static int BI_STR_VERT_INDEX = 15; //15 - strength for 2 vertical pairs of the main camera
static int BI_STR_DIAGM_INDEX = 16; //16 - strength for main diagonal pair of the main camera
static int BI_STR_DIAGO_INDEX = 17; //17 - strength for main diagonal pair of the main camera
static int BI_ASTR_FULL_INDEX = 18; //18 - strength for all directions of the aux camera
static int BI_ASTR_HOR_INDEX = 19; //19 - strength for 2 horizontal pairs of the aux camera
static int BI_ASTR_VERT_INDEX = 20; //20 - strength for 2 vertical pairs of the aux camera
static int BI_ASTR_DIAGM_INDEX = 21; //21 - strength for main diagonal pair of the aux camera
static int BI_ASTR_DIAGO_INDEX = 22; //22 - strength for main diagonal pair of the aux camera
static int BI_STR_CROSS_INDEX = 23; //23 - strength between the main the aux camera
static int BI_STR_ALL_INDEX = 24; //24 - average strength (product of strengths to 1/3 power), TODO: strength at cross disparity
static int BI_TARGET_INDEX = 25; //25 - target disparity
static int BI_DBG1_INDEX = 26; //26 - debug layer 1
static int BI_DBG2_INDEX = 27; //27 - debug layer 2
*/
}
......@@ -71,6 +71,23 @@ public class BiQuadParameters {
public double min_trusted_strength = 0.1;//14// Minimal trusted combo strength;
public double trusted_tolerance = 1.0; // Trusted tolerance for small baseline camera(s)
// rig LT (poor textured areas)
public double lt_min_disparity = 0.0; // apply low texture to near objects
public double lt_trusted_strength = 0.2; // strength sufficient without neighbors
public double lt_need_friends = 0.4; // strength sufficient with neighbors support, fraction of lt_trusted_strength
public double lt_friends_diff = 0.15; // pix difference to neighbors to be considered a match (TODO: use tilted)
public double lt_friends_rdiff = 0.04; // Add per each disparity pixel
public int lt_min_friends_any = 2; // minimal number of even weak friends
public int lt_min_friends_trusted = 2; // minimal number of trusted (strong or already confirmed)
public int lt_friends_dist = 3; // how far to look for friends
public boolean lt_replace_lone = true; // try to overwrite lone weak
public int lt_extend_dist = 3; // how far to extend around known tiles (probably should increase this value up to?
// dealing with neighbors variance
public double lt_wsigma = 1.0; // Reduce influence of far neighbors with this Gaussian sigma
public double lt_max_asigma = .15; // Maximal acceptable standard deviation of the neighbors (remove, then add)
public double lt_max_rsigma = .04; // Additional standard deviation for each pixel of disparity (relative)
public int ml_hwidth = 2; // Half-width of the ML tiles to export (0-> 1x1, 1->3x3, 2 -> 5x5)
public double ml_disparity_sweep = 2.0; // Disparity sweep around ground truth, each side
public int ml_sweep_steps = 5; // Number of disparity sweep steps
......@@ -83,6 +100,7 @@ public class BiQuadParameters {
public boolean ml_8bit= true; // output in 8-bit format (default - 32-bit TIFF
public double ml_limit_extrim = 0.00001; // ignore lowest and highest values when converting to 8 bpp
public boolean ml_show_ml = true; // show each generated MLoutput file
public double ml_fatzero = 0.05; // Use this value for correlation
......@@ -169,6 +187,38 @@ public class BiQuadParameters {
gd.addNumericField("Trusted tolerance for small baseline camera(s)", this.trusted_tolerance, 3,6,"",
"When downscaling valid residual disparity from the most sensitive inter-camera, do not reduce it to be lower than this");
gd.addTab("Rig LT","Deal with the low textured areas");
gd.addNumericField("Apply low texture to near objects (disparity above)", this.lt_min_disparity, 3,6,"pix",
"Handle low textured objects with disparity (main camera pixels) above this threshold");
gd.addNumericField("Inter-camera correlation strength sufficient without neighbors", this.lt_trusted_strength, 3,6,"",
"Consider such tiles valid regardless of surrounding tiles");
gd.addNumericField("Strength sufficient with neighbors support, fraction of the trusted (above)", this.lt_need_friends, 3,6,"",
"Tiles above this inter-camera strength may be valid if they have same disparity neighbors");
gd.addNumericField("Maximal disparity difference to neighbors to be considered a match", this.lt_friends_diff, 3,6,"pix",
"TODO: use non fronto parallel surfaces (e.g. horizontal) ");
gd.addNumericField("Add to allowed disparity difference per each pixel of disparity", this.lt_friends_rdiff, 3,6,"pix/pix",
"Additional relative disparity difference");
gd.addNumericField("Minimal number of even weak friends sufficient for support", this.lt_min_friends_any, 0,3,"",
"Tiles having this number of same-disparity neighbors with strength above minimal are considered trusted");
gd.addNumericField("Minimal number of trusted (strong or already confirmed)", this.lt_min_friends_trusted, 0,3,"",
"Tiles having this number of same-disparity neighbors with trusted strength or already confirmed by other neighbors are trusted too");
gd.addNumericField("How far to look for friends", this.lt_friends_dist, 0,3,"",
"Look for matching tiles inside a square of 2*<this value>+1 around this tile");
gd.addCheckbox ("Discard lone weak", this.lt_replace_lone,
"Discard lone (no matching neighbors) tiles that have strength below trusted");
gd.addNumericField("How far to extend around known tiles", this.lt_extend_dist, 0,3,"",
"Try new tiles that have neighbors within a square around it");
gd.addNumericField("Reduce influence of far neighbors with this Gaussian sigma", this.lt_wsigma, 4,6,"pix",
"Neighbor weight is multiplied by a Gaussian with this sigma around the new tile");
gd.addNumericField("Maximal acceptable standard deviation of the neighbors (remove, then add)", this.lt_max_asigma, 4,6,"pix",
"When multiple neighbors have different disparity, try to remove outliers, then add some back");
gd.addNumericField("Additional standard deviation for each pixel of disparity (relative)", this.lt_max_rsigma, 4,6,"pix/pix",
"Loosen sigma requirements for high disparity by adding this value for each 1 pixel of disparity");
gd.addTab("ML","Parameters related to the ML files generation for the dual-quad camera rig");
gd.addNumericField("Half-width of the ML tiles to export (0-> 1x1, 1->3x3, 2 -> 5x5)", this.ml_hwidth, 0,3,"",
......@@ -194,6 +244,8 @@ public class BiQuadParameters {
"Use values histogram to find min/max values, ignoring(limiting) this fraction (parts per million) of pixels at both extremes");
gd.addCheckbox ("Show each generated ML file", this.ml_show_ml,
"Use only for small number of generated files to reduce memory usage");
gd.addNumericField("Use this phase correlation fat zero when generating ML files", this.ml_fatzero, 5,8,"",
"Replace normal fat zero value used elsewhere");
}
public void dialogAnswers(GenericJTabbedDialog gd) {
......@@ -240,6 +292,20 @@ public class BiQuadParameters {
this.min_trusted_strength= gd.getNextNumber();
this.trusted_tolerance= gd.getNextNumber();
this.lt_min_disparity= gd.getNextNumber();
this.lt_trusted_strength= gd.getNextNumber();
this.lt_need_friends= gd.getNextNumber();
this.lt_friends_diff= gd.getNextNumber();
this.lt_friends_rdiff= gd.getNextNumber();
this.lt_min_friends_any= (int) gd.getNextNumber();
this.lt_min_friends_trusted= (int) gd.getNextNumber();
this.lt_friends_dist= (int) gd.getNextNumber();
this.lt_replace_lone= gd.getNextBoolean();
this.lt_extend_dist= (int) gd.getNextNumber();
this.lt_wsigma= gd.getNextNumber();
this.lt_max_asigma= gd.getNextNumber();
this.lt_max_rsigma= gd.getNextNumber();
this.ml_hwidth= (int) gd.getNextNumber();
this.ml_disparity_sweep= gd.getNextNumber();
this.ml_sweep_steps= (int) gd.getNextNumber();
......@@ -251,6 +317,7 @@ public class BiQuadParameters {
this.ml_8bit= gd.getNextBoolean();
this.ml_limit_extrim= gd.getNextNumber() * 1E-6;
this.ml_show_ml= gd.getNextBoolean();
this.ml_fatzero= gd.getNextNumber();
}
public void setProperties(String prefix,Properties properties){
......@@ -299,6 +366,20 @@ public class BiQuadParameters {
properties.setProperty(prefix+"min_trusted_strength", this.min_trusted_strength+"");
properties.setProperty(prefix+"trusted_tolerance", this.trusted_tolerance+"");
properties.setProperty(prefix+"lt_min_disparity", this.lt_min_disparity+"");
properties.setProperty(prefix+"lt_trusted_strength", this.lt_trusted_strength+"");
properties.setProperty(prefix+"lt_need_friends", this.lt_need_friends+"");
properties.setProperty(prefix+"lt_friends_diff", this.lt_friends_diff+"");
properties.setProperty(prefix+"lt_friends_rdiff", this.lt_friends_rdiff+"");
properties.setProperty(prefix+"lt_min_friends_any", this.lt_min_friends_any+"");
properties.setProperty(prefix+"lt_min_friends_trusted", this.lt_min_friends_trusted+"");
properties.setProperty(prefix+"lt_friends_dist", this.lt_friends_dist+"");
properties.setProperty(prefix+"lt_replace_lone", this.lt_replace_lone+"");
properties.setProperty(prefix+"lt_extend_dist", this.lt_extend_dist+"");
properties.setProperty(prefix+"lt_wsigma", this.lt_wsigma+"");
properties.setProperty(prefix+"lt_max_asigma", this.lt_max_asigma+"");
properties.setProperty(prefix+"lt_max_rsigma", this.lt_max_rsigma+"");
properties.setProperty(prefix+"ml_hwidth", this.ml_hwidth+"");
properties.setProperty(prefix+"ml_disparity_sweep", this.ml_disparity_sweep+"");
properties.setProperty(prefix+"ml_sweep_steps", this.ml_sweep_steps+"");
......@@ -310,8 +391,7 @@ public class BiQuadParameters {
properties.setProperty(prefix+"ml_8bit", this.ml_8bit+"");
properties.setProperty(prefix+"ml_limit_extrim", this.ml_limit_extrim+"");
properties.setProperty(prefix+"ml_show_ml", this.ml_show_ml+"");
properties.setProperty(prefix+"ml_fatzero", this.ml_fatzero+"");
}
public void getProperties(String prefix,Properties properties){
if (properties.getProperty(prefix+"rig_mode_debug")!=null) this.rig_mode_debug=Boolean.parseBoolean(properties.getProperty(prefix+"rig_mode_debug"));
......@@ -357,6 +437,20 @@ public class BiQuadParameters {
if (properties.getProperty(prefix+"trusted_tolerance")!=null) this.trusted_tolerance=Double.parseDouble(properties.getProperty(prefix+"trusted_tolerance"));
if (properties.getProperty(prefix+"ml_hwidth")!=null) this.ml_hwidth=Integer.parseInt(properties.getProperty(prefix+"ml_hwidth"));
if (properties.getProperty(prefix+"lt_min_disparity")!=null) this.lt_min_disparity=Double.parseDouble(properties.getProperty(prefix+"lt_min_disparity"));
if (properties.getProperty(prefix+"lt_trusted_strength")!=null) this.lt_trusted_strength=Double.parseDouble(properties.getProperty(prefix+"lt_trusted_strength"));
if (properties.getProperty(prefix+"lt_need_friends")!=null) this.lt_need_friends=Double.parseDouble(properties.getProperty(prefix+"lt_need_friends"));
if (properties.getProperty(prefix+"lt_friends_diff")!=null) this.lt_friends_diff=Double.parseDouble(properties.getProperty(prefix+"lt_friends_diff"));
if (properties.getProperty(prefix+"lt_friends_rdiff")!=null) this.lt_friends_rdiff=Double.parseDouble(properties.getProperty(prefix+"lt_friends_rdiff"));
if (properties.getProperty(prefix+"lt_min_friends_any")!=null) this.lt_min_friends_any=Integer.parseInt(properties.getProperty(prefix+"lt_min_friends_any"));
if (properties.getProperty(prefix+"lt_min_friends_trusted")!=null) this.lt_min_friends_trusted=Integer.parseInt(properties.getProperty(prefix+"lt_min_friends_trusted"));
if (properties.getProperty(prefix+"lt_friends_dist")!=null) this.lt_friends_dist=Integer.parseInt(properties.getProperty(prefix+"lt_friends_dist"));
if (properties.getProperty(prefix+"lt_replace_lone")!=null) this.lt_replace_lone=Boolean.parseBoolean(properties.getProperty(prefix+"lt_replace_lone"));
if (properties.getProperty(prefix+"lt_extend_dist")!=null) this.lt_extend_dist=Integer.parseInt(properties.getProperty(prefix+"lt_extend_dist"));
if (properties.getProperty(prefix+"lt_wsigma")!=null) this.lt_wsigma=Double.parseDouble(properties.getProperty(prefix+"lt_wsigma"));
// Read back the same keys that setProperties() writes ("lt_max_asigma"/"lt_max_rsigma").
// Previously both lines read the nonexistent key "lt_max_sigma": saved values were never
// restored and Double.parseDouble(null) threw an NPE whenever the checked key existed.
if (properties.getProperty(prefix+"lt_max_asigma")!=null) this.lt_max_asigma=Double.parseDouble(properties.getProperty(prefix+"lt_max_asigma"));
if (properties.getProperty(prefix+"lt_max_rsigma")!=null) this.lt_max_rsigma=Double.parseDouble(properties.getProperty(prefix+"lt_max_rsigma"));
if (properties.getProperty(prefix+"ml_disparity_sweep")!=null) this.ml_disparity_sweep=Double.parseDouble(properties.getProperty(prefix+"ml_disparity_sweep"));
if (properties.getProperty(prefix+"ml_sweep_steps")!=null) this.ml_sweep_steps=Integer.parseInt(properties.getProperty(prefix+"ml_sweep_steps"));
if (properties.getProperty(prefix+"ml_keep_aux")!=null) this.ml_keep_aux=Boolean.parseBoolean(properties.getProperty(prefix+"ml_keep_aux"));
......@@ -367,6 +461,7 @@ public class BiQuadParameters {
if (properties.getProperty(prefix+"ml_8bit")!=null) this.ml_8bit=Boolean.parseBoolean(properties.getProperty(prefix+"ml_8bit"));
if (properties.getProperty(prefix+"ml_limit_extrim")!=null) this.ml_limit_extrim=Double.parseDouble(properties.getProperty(prefix+"ml_limit_extrim"));
if (properties.getProperty(prefix+"ml_show_ml")!=null) this.ml_show_ml=Boolean.parseBoolean(properties.getProperty(prefix+"ml_show_ml"));
if (properties.getProperty(prefix+"ml_fatzero")!=null) this.ml_fatzero=Double.parseDouble(properties.getProperty(prefix+"ml_fatzero"));
}
@Override
public BiQuadParameters clone() throws CloneNotSupportedException {
......@@ -413,6 +508,20 @@ public class BiQuadParameters {
bqp.min_trusted_strength= this.min_trusted_strength;
bqp.trusted_tolerance= this.trusted_tolerance;
bqp.lt_min_disparity= this.lt_min_disparity;
bqp.lt_trusted_strength= this.lt_trusted_strength;
bqp.lt_need_friends= this.lt_need_friends;
bqp.lt_friends_diff= this.lt_friends_diff;
bqp.lt_friends_rdiff= this.lt_friends_rdiff;
bqp.lt_min_friends_any= this.lt_min_friends_any;
bqp.lt_min_friends_trusted= this.lt_min_friends_trusted;
bqp.lt_friends_dist= this.lt_friends_dist;
bqp.lt_replace_lone= this.lt_replace_lone;
bqp.lt_extend_dist= this.lt_extend_dist;
bqp.lt_wsigma= this.lt_wsigma;
bqp.lt_max_asigma= this.lt_max_asigma;
bqp.lt_max_rsigma= this.lt_max_rsigma;
bqp.ml_hwidth= this.ml_hwidth;
bqp.ml_disparity_sweep= this.ml_disparity_sweep;
bqp.ml_sweep_steps= this.ml_sweep_steps;
......@@ -424,6 +533,7 @@ public class BiQuadParameters {
bqp.ml_8bit= this.ml_8bit;
bqp.ml_limit_extrim= this.ml_limit_extrim;
bqp.ml_show_ml= this.ml_show_ml;
bqp.ml_fatzero = this.ml_fatzero;
return bqp;
}
}
import java.util.ArrayList;
import Jama.Matrix;
/**
**
** Correlation2dLMA - Fit multi - baseline correaltion pairs to the model
**
** Copyright (C) 2018 Elphel, Inc.
**
** -----------------------------------------------------------------------------**
**
** CorrelationRigLMA.java is free software: you can redistribute it and/or modify
** it under the terms of the GNU General Public License as published by
** the Free Software Foundation, either version 3 of the License, or
** (at your option) any later version.
**
** This program is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
** GNU General Public License for more details.
**
** You should have received a copy of the GNU General Public License
** along with this program. If not, see <http://www.gnu.org/licenses/>.
** -----------------------------------------------------------------------------**
**
*/
/*
* Fitting parabola (use value_to_weight to discount off-maximum samples) for multiple cameras, such as a pair of quad cams
* It takes 2d correlations, uses common offsets dependence for x and y offsets on disparity
*
* Fits the following commonn (to all pairs) parameters:
* A - overlall strength
* Sx, Sy, Sxy - reverse widths
* d - disparity
*
* Each pair may have 3 other parameters, with a commom mask and regularization coefficients (2) to punish for not being == 0:
* M[i] - additive term (scaled with A
* X[i], Y[i] - additional x, y ofsfet caused my mis-alignment of the cameras
*
* 2-d function to be fitted for each pair is (x,y - coordinates in 2d correlation, relative to the center):
* f(x,y) = A * (M[i] + (1 - (Sx^2*(x-x0)^2 + Sy^2*(y-y0)^2+Sxy*(x-x0)*(y-y0)))), where
* x0 = X[i] - d * Kx[i]
* y0 = Y[i] - d * Ky[i]
*
* Kx, Ky are calculated in advance, form geometry andrelative disparity (1.0 for the main camera, ~0.6 for aux, ~5.0 for inter)
* Kxy={
* {1, 0}, // top pair
* {1, 0}, // bottom pair
* {0, 1}, // left pair
* {0, 1}, // right pair
* {1, 1}, // main diagonal pair
* {1,-1}} // other diagonal pair
*/
/**
 * Levenberg-Marquardt fitting of a shared elliptical-paraboloid model to 2d
 * correlation data measured by multiple camera pairs: 6 pairs of the main quad
 * camera, 6 pairs of the auxiliary one, and a single inter-camera pair.
 *
 * Common (to all pairs) parameters: disparity, amplitude A, inverse widths
 * Sx, Sy and cross term Sxy. Optional per-pair parameters, regularized towards
 * zero: additive term M and misalignment offsets X, Y.
 *
 * The fitted function for pair i at correlation coordinates (x, y) is
 *   f(x,y) = A * (M[i] + 1 - (Sx^2*(x-x0)^2 + Sy^2*(y-y0)^2 + 2*Sxy*Sx*Sy*(x-x0)*(y-y0)))
 * with x0 = X[i] - d * Kx[i], y0 = Y[i] - d * Ky[i].
 */
public class CorrelationRigLMA {
	// Disparity-to-offset direction vectors {Kx, Ky} for the 6 pairs of a quad camera
	static final double [][] QUAD_KXY = {
			{1.0, 0.0}, // top pair
			{1.0, 0.0}, // bottom pair
			{0.0, 1.0}, // left pair
			{0.0, 1.0}, // right pair
			{1.0, 1.0}, // main diagonal pair
			{1.0,-1.0}}; // other diagonal pair
	// Indices into the deriv[] array filled by getFxAndDerivatives()
	static final int INDEX_DISP = 0; // common disparity
	static final int INDEX_DA   = 1; // common amplitude
	static final int INDEX_DSX  = 2; // common inverse width (x)
	static final int INDEX_DSY  = 3; // common inverse width (y)
	static final int INDEX_DSXY = 4; // common cross term
	static final int INDEX_DM   = 5; // per-pair additive term
	static final int INDEX_DX0  = 6; // per-pair x offset
	static final int INDEX_DY0  = 7; // per-pair y offset
	static final int INDEX_LEN  = 8;
	static final String [] PAR_NAMES = {"Disparity","Amplitude","sharpness-X","sharpness-Y","Narrowness-XY", "shift", "dx", "dy"};
	static final String [] PAIR_NAMES = {"top","bottom","left","right","diagm","diago"};
	static final String [] CAM_NAMES = {"main","aux","inter"};

	double [][] Kxy = null;            // per-pair {Kx, Ky}; a null row means the pair is disabled
	boolean     use_master = true;
	boolean     use_aux = true;
	boolean     use_inter = true;
	double      default_width = 2.0;   // pix
	boolean     adjust_M = false;      // fit per-pair additive terms
	boolean     adjust_xy = false;     // fit per-pair x/y offsets
	double      cost_width = 0.0;
	double      cost_max_value = 0.0;
	double      cost_xy = 0.0;
	double      weight_inter = 4.0;    // relative weight of inter-camera correlation to a pair correlation
	double [][] corr_data = null;
	int         num_pairs;             // number of enabled correlation pairs
	int         num_pair_pars;         // number of per-pair individual parameters (0..3 - m, xy)
	int [][]    pair_par_index = null; // parameter number -> {pair number (-1 - global), derivative index}
	int [][]    mxy_pair_index = null; // pair -> parameter numbers for M and for DX (-1 - none)
	ArrayList<ArrayList<Sample>> samples;
	double []   y_vector;      // measured values with trailing zeros for regularization (only m, xy, no width)
	double []   w_vector;      // weights matching y
	double      weights_pure;  // sum of sample weights (1.0 - regularization weights)
	double [][] xy_vector;     // x and y coordinates in the 2d correlation arrays
	int []      pair_vector;   // pair number per sample
	double []   vector;        // current parameter vector
	// next values are only updated after success
	double []   last_rms = null;        // {rms, rms_pure}, matching this.vector
	double []   good_or_bad_rms = null; // diagnostics only - last (possibly failed) rms
	double []   initial_rms = null;     // {rms, rms_pure}, first-calculated rms
	double []   last_ymfx = null;
	double [][] last_jt = null;

	/** Single weighted measurement of one 2d correlation tile. */
	class Sample{
		double x; // x coordinate on the common scale (corresponding to the largest baseline), along the disparity axis
		double y; // y coordinate (0 - disparity axis)
		double v; // correlation value at that point
		double w; // sample weight
		Sample (
				double x,
				double y,
				double v,
				double w)
		{
			this.x = x;
			this.y = y;
			this.v = v;
			this.w = w;
		}
	}

	/**
	 * Pre-calculate the disparity-to-offset vectors Kxy for all 13 pairs.
	 * @param rel_disp_aux     relative disparity of the aux camera (~0.6)
	 * @param rel_disp_inter_x inter-camera x component (>0 when aux is to the right of main)
	 * @param rel_disp_inter_y inter-camera y component (0.0 for a horizontal pair)
	 */
	public void initRigKx(
			double rel_disp_aux,
			double rel_disp_inter_x,
			double rel_disp_inter_y) {
		int num_cam_pairs = QUAD_KXY.length;
		Kxy = new double [2 * num_cam_pairs + 1][]; // 2 cameras with 6 pairs and one inter-camera pair
		for (int i = 0; i < num_cam_pairs; i++) {
			// BUGFIX: the rows of Kxy were never allocated (new double[n][] leaves null rows -> NPE)
			Kxy[i] = QUAD_KXY[i].clone();
			Kxy[i + num_cam_pairs] = new double [2];
			Kxy[i + num_cam_pairs][0] = QUAD_KXY[i][0] * rel_disp_aux;
			// BUGFIX: y component used QUAD_KXY[i][0] (copy-paste) instead of QUAD_KXY[i][1]
			Kxy[i + num_cam_pairs][1] = QUAD_KXY[i][1] * rel_disp_aux;
		}
		Kxy[2 * num_cam_pairs] = new double [] {rel_disp_inter_x, rel_disp_inter_y};
	}

	/**
	 * Configure the fitter once for all tiles. Must be called after initRigKx().
	 */
	public void setup( // prepare once for all tiles
			boolean use_master,
			boolean use_aux,
			boolean use_inter,
			boolean adjust_max_value,
			boolean adjust_xy,
			double  default_width,
			double  cost_width,
			double  cost_max_value,
			double  cost_xy,
			double  weight_inter) {
		this.use_master =     use_master;
		this.use_aux =        use_aux;
		this.use_inter =      use_inter;
		this.adjust_M =       adjust_max_value;
		this.adjust_xy =      adjust_xy;
		this.default_width =  default_width;
		this.cost_width =     cost_width;
		this.cost_max_value = cost_max_value;
		this.cost_xy =        cost_xy;
		this.weight_inter =   weight_inter;
		setupMask();
		// BUGFIX: was sized pair_par_index.length (can be as small as 5) - too short
		// for addCorrData(), which indexes up to Kxy.length - 1
		corr_data = new double [Kxy.length][];
		samples = new ArrayList<ArrayList<Sample>>();
		for (int i = 0; i < Kxy.length; i++) samples.add((Kxy[i] != null)? (new ArrayList<Sample>()) : null);
	}

	/**
	 * Add one weighted measurement for a pair of one of the cameras.
	 * @param ncam 0 - main, 1 - aux, 2 - inter (npair must be 0 for inter)
	 */
	public void addSample(
			int ncam,
			int npair,
			double x, // x coordinate on the common scale (corresponding to the largest baseline), along the disparity axis
			double y, // y coordinate (0 - disparity axis)
			double v, // correlation value at that point
			double w) {
		if (ncam == 2) w *= weight_inter; // boost the single inter-camera pair
		samples.get(QUAD_KXY.length * ncam + npair).add(new Sample(x,y,v,w));
	}

	/**
	 * Flatten the accumulated samples into y/w/xy/pair vectors and append the
	 * regularization pseudo-samples. Normalizes sample weights so that
	 * (sample weights) + (regularization weights) sum to 1.0.
	 */
	public void setYW() {
		int num_samples = 0;
		for (ArrayList<Sample> als : samples) {
			if (als != null) num_samples += als.size();
		}
		int num_reg = num_pairs * num_pair_pars;
		w_vector =    new double [num_samples + num_reg];
		y_vector =    new double [w_vector.length];
		xy_vector =   new double [w_vector.length][];
		// BUGFIX: pair_vector was never allocated -> NPE on the assignment below
		pair_vector = new int    [w_vector.length];
		int ns = 0;
		double sw = 0.0;
		// total weight dedicated to regularization terms (per whole fit)
		double w_ind = (adjust_M? cost_max_value : 0.0) + 2 * (adjust_xy? cost_xy : 0.0);
		weights_pure = 1.0 - w_ind;
		double wm =  cost_max_value/num_pairs;
		double wxy = cost_xy/num_pairs;
		for (int pair = 0; pair < samples.size(); pair++) {
			ArrayList<Sample> als = samples.get(pair);
			if (als != null) {
				for (Sample sample : als) {
					w_vector[ns] =  sample.w;
					sw +=           sample.w;
					y_vector[ns] =  sample.v;
					xy_vector[ns] = new double [] {sample.x, sample.y};
					pair_vector[ns] = pair;
					ns++;
				}
			}
		}
		for (int np = 0; np < num_pairs; np++) {
			if (adjust_M) w_vector[ns++] = wm;
			if (adjust_xy) {
				w_vector[ns++] = wxy;
				w_vector[ns++] = wxy;
			}
		}
		if (sw > 0.0) { // scale sample weights so they sum to weights_pure
			double kw = weights_pure/sw;
			for (int i = 0; i < num_samples; i++) w_vector[i] *= kw;
		}
	}

	/**
	 * Calculate the model values for all samples (and regularization terms) and,
	 * optionally, the transposed Jacobian.
	 * @param vector parameter vector
	 * @param jt     should be initialized as double [number of parameters][], or
	 *               null to calculate fx only
	 * @return fx for each sample, regularization terms trailing
	 */
	double [] getFxAndJacobian(
			double []  vector,
			double [][] jt) {
		int num_points = w_vector.length;
		double [] fx = new double [num_points];
		double [] derivs = null;
		if (jt != null) {
			for (int i = 0; i < jt.length; i++) jt[i] = new double [num_points];
			derivs = new double [INDEX_LEN];
		}
		int num_reg = num_pairs * num_pair_pars;
		int ns;
		for (ns = 0; ns < num_points - num_reg; ns++) {
			int np = pair_vector[ns];
			double M = (mxy_pair_index[np][0] >= 0)? vector[mxy_pair_index[np][0]] : Double.NaN;
			double [] Dxy = null;
			int xy_index = mxy_pair_index[np][1]; // pair number to parameter index (x of the xy)
			if (xy_index >= 0) {
				Dxy = new double [] {vector[xy_index], vector[xy_index + 1]};
			}
			fx[ns] = getFxAndDerivatives(
					np,                  // int pair,
					xy_vector[ns][0],    // double x,
					xy_vector[ns][1],    // double y,
					vector[INDEX_DISP],  // double disparity,
					vector[INDEX_DA],    // double A,
					vector[INDEX_DSX],   // double Sx,
					vector[INDEX_DSY],   // double Sy,
					vector[INDEX_DSXY],  // double Sxy,
					M,                   // double M,   // NaN - assumed 0
					Dxy,                 // double [] Dxy, // null - assumed {0,0}
					derivs);             // double [] deriv);
			// save jacobian results - just for the relevant parameters
			// BUGFIX: jt/derivs were dereferenced unconditionally, breaking the
			// documented fx-only (jt == null) mode used by the delta overload
			if (jt != null) {
				for (int nderiv = 0; nderiv < INDEX_DM; nderiv++) {
					jt[nderiv][ns] = derivs[nderiv]; // common parameters
				}
				if (mxy_pair_index[np][0] >= 0) {
					jt[mxy_pair_index[np][0]][ns] = derivs[INDEX_DM];
				}
				if (xy_index >= 0) {
					jt[xy_index    ][ns] = derivs[INDEX_DX0];
					jt[xy_index + 1][ns] = derivs[INDEX_DY0];
				}
			}
		}
		// ns points to the first regularization "sample". Identity rows: the
		// regularization residual is just the parameter value itself.
		for (int npar = INDEX_DM; npar < vector.length; npar++) {
			fx[ns] = vector[npar];
			if (jt != null) jt[npar][ns] = 1.0;
			ns++;
		}
		return fx;
	}

	/**
	 * Numeric-derivative version for testing: Jacobian as (f(x+d) - f(x-d)) / (2d).
	 * @param jt should be either [vector.length][] or null - then only fx is calculated
	 */
	public double [] getFxAndJacobian(
			double delta,
			double [] vector,
			double [][] jt) {
		double [] fx0 = getFxAndJacobian(vector, null);
		for (int np = 0; np < vector.length; np++) {
			double [] vector1 = vector.clone();
			vector1[np] += delta;
			double [] fxp = getFxAndJacobian(vector1, null);
			vector1 = vector.clone();
			vector1[np] -= delta;
			double [] fxm = getFxAndJacobian(vector1, null);
			jt[np] = new double [fxp.length];
			for (int i = 0; i < fxp.length; i++) {
				jt[np][i] = (fxp[i] - fxm[i])/delta/2;
			}
		}
		return fx0;
	}

	/**
	 * Print analytic vs numeric Jacobian side by side and the maximal
	 * per-parameter difference (debug aid for getFxAndDerivatives()).
	 */
	public void debugJt(
			double delta,
			double [] vector) {
		int num_pars = vector.length;
		double [] max_diff = new double [num_pars];
		double [][] jt =       new double [num_pars][];
		double [][] jt_delta = new double [num_pars][];
		double [] fx = getFxAndJacobian( vector, jt);
		getFxAndJacobian(delta, vector, jt_delta);
		System.out.println("Test of jt-jt_delta difference, delta = "+delta);
		System.out.print(String.format("%3s: %10s ", "#", "fx"));
		for (int anp = 0; anp < pair_par_index.length; anp++){
			int pair =     pair_par_index[anp][0];
			int par_type = pair_par_index[anp][1];
			int ncam = -1;
			if (pair >= 0) ncam = pair / QUAD_KXY.length;
			String par_name = PAR_NAMES[par_type] + ((pair >= 0)? ("-"+PAIR_NAMES[pair]+"-"+CAM_NAMES[ncam]) : "");
			System.out.print(String.format("%17s ", par_name));
		}
		System.out.println();
		for (int i = 0; i < fx.length; i++) {
			System.out.print(String.format("%3d: %10.7f ", i, fx[i]));
			for (int np = 0; np < num_pars; np++) {
				System.out.print(String.format("%8.5f %8.5f ", jt_delta[np][i], 1000*(jt[np][i] - jt_delta[np][i])));
				double adiff = Math.abs(jt[np][i] - jt_delta[np][i]);
				if (adiff > max_diff[np]) {
					max_diff[np] = adiff;
				}
			}
			System.out.println();
		}
		System.out.print(String.format("%15s ", "Maximal diff:"));
		for (int np = 0; np < num_pars; np++) {
			System.out.print(String.format("%8s %8.5f ", "1/1000×", 1000*max_diff[np]));
		}
		System.out.println();
	}

	/**
	 * Weighted residuals w*(y - fx); also returns RMS values.
	 * @param rmses {rms, rms_pure} output (allocated if null)
	 */
	double [] getYMinusFxWeighted(
			double [] fx,
			double [] rmses
			) {
		if (rmses == null) {
			rmses = new double [2];
		}
		double [] y_minus_fx_w = new double [fx.length];
		double swd2 = 0.0;
		int num_pure = fx.length - num_pairs * num_pair_pars;
		for (int ns = 0; ns < num_pure; ns++) {
			double d =  y_vector[ns] - fx[ns];
			double dw = d * w_vector[ns];
			swd2 += d * dw;
			y_minus_fx_w[ns] = dw;
		}
		// NOTE(review): sqrt(swd2/weights_pure) would be the weight-normalized
		// RMS; left as-is pending confirmation against the sibling LMA classes
		rmses[1] = Math.sqrt(swd2)/ weights_pure;
		for (int ns = num_pure; ns < fx.length; ns++) {
			double d = -fx[ns]; // regularization target is 0
			double dw = d * w_vector[ns];
			swd2 += d * dw;
			y_minus_fx_w[ns] = dw;
		}
		rmses[0] = Math.sqrt(swd2); // total weights sum to 1.0 after setYW()
		return y_minus_fx_w;
	}

	/**
	 * Calculate W-weighted JtJ with the Levenberg-Marquardt lambda term added
	 * to the diagonal.
	 */
	public double [][] getWJtJlambda(
			double      lambda,
			double [][] jt){
		int num_pars =   jt.length;
		int nup_points = jt[0].length;
		double [][] wjtjl = new double [num_pars][num_pars];
		for (int i = 0; i < num_pars; i++) {
			for (int j = i; j < num_pars; j++) {
				double d = 0.0;
				for (int k = 0; k < nup_points; k++) {
					d += this.w_vector[k]*jt[i][k]*jt[j][k];
				}
				wjtjl[i][j] = d;
				if (i == j) {
					wjtjl[i][j] += d * lambda; // diagonal damping: d*(1 + lambda)
				} else {
					wjtjl[j][i] = d; // symmetric
				}
			}
		}
		return wjtjl;
	}

	/** Jt * wdiff (dot product per parameter row). */
	public double [] getJtWdiff(
			double []   wdiff,
			double [][] jt){
		int num_pars =   jt.length;
		int nup_points = jt[0].length;
		double [] wjtymfx = new double [num_pars];
		for (int i = 0; i < num_pars; i++) {
			double d = 0;
			// BUGFIX: accumulation used '+' instead of '*' - not a dot product
			for (int j = 0; j < nup_points; j++) d += wdiff[j] * jt[i][j];
			wjtymfx[i] = d;
		}
		return wjtymfx;
	}

	/**
	 * Run the LMA iterations until converged, failed, or num_iter exhausted.
	 * @return true if the final RMS improved over the initial one
	 */
	public boolean runLma(
			double lambda,            // 0.1
			double lambda_scale_good, // 0.5
			double lambda_scale_bad,  // 8.0
			double lambda_max,        // 100
			double rms_diff,          // 0.001
			int    num_iter,          // 20
			int    debug_level)
	{
		boolean [] rslt = {false,false};
		int iter = 0;
		for (iter = 0; iter < num_iter; iter++) {
			rslt = lmaStep(
					lambda,
					rms_diff,
					debug_level);
			if (debug_level > 1) {
				System.out.println("LMA step "+iter+": {"+rslt[0]+","+rslt[1]+"} full RMS="+good_or_bad_rms[0]+
						" ("+initial_rms[0]+"), pure RMS="+good_or_bad_rms[1]+" ("+initial_rms[1]+") + lambda="+lambda);
			}
			if (rslt[1]) { // done (converged or singular)
				break;
			}
			if (rslt[0]) { // good step - be more like Gauss-Newton
				lambda *= lambda_scale_good;
			} else {       // bad step - be more like gradient descent
				lambda *= lambda_scale_bad;
				if (lambda > lambda_max) {
					break;
				}
			}
		}
		if (rslt[0]) { // better, but num tries exceeded
			if (iter >= num_iter) {
				if (debug_level > 0) System.out.println("Step "+iter+": Improved, but number of steps exceeded maximal");
			} else {
				if (debug_level > 0) System.out.println("Step "+iter+": LMA: Success");
			}
		} else { // improved over initial ?
			if (last_rms[0] < initial_rms[0]) {
				rslt[0] = true;
				if (debug_level > 0) System.out.println("Step "+iter+": Failed to converge, but result improved over initial");
			} else {
				if (debug_level > 0) System.out.println("Step "+iter+": Failed to converge");
			}
		}
		if (debug_level > 0) {
			System.out.println("LMA: full RMS="+last_rms[0]+" ("+initial_rms[0]+"), pure RMS="+last_rms[1]+" ("+initial_rms[1]+") + lambda="+lambda);
		}
		return rslt[0];
	}

	/**
	 * One LMA step.
	 * @return {success (RMS improved), done (converged or singular matrix)}
	 */
	public boolean [] lmaStep(
			double lambda,
			double rms_diff,
			int    debug_level) {
		int num_pars = vector.length;
		boolean [] rslt = {false,false};
		if (this.last_rms == null) { // first time, need to calculate all (vector is valid)
			// BUGFIX: last_rms must be allocated here - getYMinusFxWeighted()
			// cannot assign through a null argument, and it is cloned below
			this.last_rms = new double [2];
			this.last_jt = new double [num_pars][];
			double [] fx = getFxAndJacobian(
					this.vector,   // double []   vector,
					this.last_jt); // double [][] jt)
			this.last_ymfx = getYMinusFxWeighted(
					fx,             // double [] fx,
					this.last_rms); // double [] rmses) // {rms, rms_pure};
			this.initial_rms =     this.last_rms.clone();
			this.good_or_bad_rms = this.last_rms.clone();
			if (debug_level > 3) {
				debugJt(
						0.000001,     // double delta,
						this.vector); // double [] vector);
			}
		}
		Matrix y_minus_fx_weighted = new Matrix(this.last_ymfx, this.last_ymfx.length);
		Matrix wjtjlambda = new Matrix(getWJtJlambda(
				lambda,         // double lambda,
				this.last_jt)); // double [][] jt)
		if (debug_level > 2) {
			System.out.println("JtJ + lambda*diag(JtJ");
			wjtjlambda.print(18, 6);
		}
		Matrix jtjl_inv = null;
		try {
			jtjl_inv = wjtjlambda.inverse(); // check for errors
		} catch (RuntimeException e) {
			rslt[1] = true; // give up - singular matrix
			if (debug_level > 0) {
				System.out.println("Singular Matrix");
			}
			return rslt;
		}
		if (debug_level > 2) {
			System.out.println("(JtJ + lambda*diag(JtJ).inv()");
			jtjl_inv.print(18, 6);
		}
		Matrix jty = (new Matrix(this.last_jt)).times(y_minus_fx_weighted);
		if (debug_level > 2) {
			System.out.println("Jt * (y-fx)");
			jty.print(18, 6);
		}
		Matrix mdelta = jtjl_inv.times(jty);
		if (debug_level > 2) {
			System.out.println("mdelta");
			mdelta.print(18, 6);
		}
		double [] delta = mdelta.getColumnPackedCopy();
		double [] new_vector = this.vector.clone();
		for (int i = 0; i < num_pars; i++) new_vector[i] += delta[i];
		// being optimistic, modify jt and last_ymfx in place, restore if failed
		double [] fx = getFxAndJacobian(
				new_vector,    // double []   vector,
				this.last_jt); // double [][] jt)
		double [] rms = new double [2];
		// BUGFIX: was passing this.last_rms, which clobbered the reference RMS
		// and left rms all zeros, so the improvement test below was meaningless
		this.last_ymfx = getYMinusFxWeighted(
				fx,   // double [] fx,
				rms); // double [] rmses) // {rms, rms_pure};
		this.good_or_bad_rms = rms.clone();
		if (rms[0] < this.last_rms[0]) { // improved
			rslt[0] = true;
			rslt[1] = rms[0] >= (this.last_rms[0] * (1.0 - rms_diff)); // improvement too small - done
			this.last_rms = rms.clone();
			this.vector = new_vector.clone();
			if (debug_level > 2) {
				System.out.print("New vector: ");
				for (int np = 0; np < vector.length; np++) {
					System.out.print(this.vector[np]+" ");
				}
				System.out.println();
			}
		} else { // worsened
			rslt[0] = false;
			rslt[1] = false; // do not know, caller will decide
			// restore state (jt, ymfx were modified in place above)
			// BUGFIX: recalculation used the rejected new_vector - must use this.vector
			fx = getFxAndJacobian(
					this.vector,   // double []   vector,
					this.last_jt); // double [][] jt)
			this.last_ymfx = getYMinusFxWeighted(
					fx,             // double [] fx,
					this.last_rms); // double [] rmses) // {rms, rms_pure};
			if (debug_level > 2) {
				debugJt(
						0.000001,     // double delta,
						this.vector); // double [] vector);
			}
		}
		return rslt;
	}

	/**
	 * Disable pairs of unused cameras and build both parameter index tables
	 * (parameter number <-> pair/derivative index).
	 */
	private void setupMask() {
		if (!use_master) for (int i = 0; i < QUAD_KXY.length; i++) Kxy[i] = null;
		if (!use_aux)    for (int i = 0; i < QUAD_KXY.length; i++) Kxy[i + QUAD_KXY.length] = null;
		if (!use_inter)  Kxy[2 * QUAD_KXY.length] = null;
		num_pairs =     (use_master? QUAD_KXY.length : 0) + (use_aux? QUAD_KXY.length : 0) + (use_inter? 1 : 0);
		num_pair_pars = (adjust_M? 1 : 0) + (adjust_xy? 2 : 0);
		int num_pars = INDEX_DM + num_pairs * num_pair_pars;
		pair_par_index = new int [num_pars][2];
		mxy_pair_index = new int [num_pairs][2]; // from pair to parameter number for M, and DX (-1 - none)
		int npar = 0;
		for (int i = 0; i <= INDEX_DSXY; i++) { // common parameters
			pair_par_index[npar]  [0] = -1;
			pair_par_index[npar++][1] = i;
		}
		for (int pair = 0; pair < Kxy.length; pair++) if (Kxy[pair] != null) {
			if (adjust_M) {
				mxy_pair_index[pair][0] = npar;
				pair_par_index[npar]  [0] = pair;
				pair_par_index[npar++][1] = INDEX_DM;
			} else {
				mxy_pair_index[pair][0] = -1;
			}
			if (adjust_xy) {
				mxy_pair_index[pair]  [1] = npar;
				pair_par_index[npar]  [0] = pair;
				pair_par_index[npar++][1] = INDEX_DX0;
				pair_par_index[npar]  [0] = pair;
				pair_par_index[npar++][1] = INDEX_DY0;
			} else {
				mxy_pair_index[pair][1] = -1;
			}
		}
	}

	/**
	 * Attach raw 2d correlation data for one camera (6 pairs) or the
	 * inter-camera pair (1 "pair").
	 * @param ncam 0 - main, 1 - aux, 2 - inter
	 */
	public void addCorrData(
			int         ncam,
			double [][] corr_data)
	{
		for (int i = 0; i < corr_data.length; i++) this.corr_data[QUAD_KXY.length * ncam + i] = corr_data[i];
	}

	/** Initial parameter vector: unit-amplitude paraboloid of default_width. */
	public double [] init_vector() {
		double [] vector = new double [pair_par_index.length];
		vector[INDEX_DSX] = 1.0/default_width;
		vector[INDEX_DSY] = 1.0/default_width;
		// all other parameters start with 0.0
		return vector;
	}

	/**
	 * Model value for one sample and (optionally) all its partial derivatives.
	 * @param M     per-pair additive term; NaN - assumed 0
	 * @param Dxy   per-pair offsets; null - assumed {0,0}
	 * @param deriv null - do not calculate; otherwise must be INDEX_LEN long,
	 *              filled with derivatives for d, A, Sx, Sy, Sxy, M, Dxy[0], Dxy[1]
	 */
	public double getFxAndDerivatives(
			int       pair,
			double    x,
			double    y,
			double    disparity,
			double    A,
			double    Sx,
			double    Sy,
			double    Sxy,
			double    M,
			double [] Dxy,
			double [] deriv)
	{
		double [] xy0 = {-disparity * Kxy[pair][0], -disparity * Kxy[pair][1]};
		if (Dxy != null) {
			xy0[0] += Dxy[0];
			xy0[1] += Dxy[1];
		}
		double dxn = x - xy0[0];
		double dyn = y - xy0[1];
		double dx = Sx * dxn;
		double dy = Sy * dyn;
		double nfxy = (1.0 - (dx*dx + dy*dy + 2*Sxy*dx*dy));
		if (!Double.isNaN(M)) nfxy += M;
		double fxy = A * nfxy;
		if (deriv != null) {
			// df/dA
			deriv[INDEX_DA] = nfxy;
			// df/dSxy
			deriv[INDEX_DSXY] = -2 * A * dx*dy;
			// chain-rule factors for anything entering through dx, dy
			double d_ddx = -2 * A * (dx + Sxy*dy);
			double d_ddy = -2 * A * (dy + Sxy*dx);
			// df/ddisparity = d_ddx * ddx/ddisparity + d_ddy * ddy/ddisparity
			deriv[INDEX_DISP] = (d_ddx * Sx * Kxy[pair][0]) + (d_ddy * Sy * Kxy[pair][1]);
			// df/dSx = d_ddx * ddx/dSx
			deriv[INDEX_DSX] = d_ddx * dxn;
			// df/dSy = d_ddy * ddy/dSy
			deriv[INDEX_DSY] = d_ddy * dyn;
			// individual/maskable
			if (!Double.isNaN(M)) deriv[INDEX_DM] = A;
			if (Dxy != null) {
				// df/dDX = d_ddx * ddx/dDX
				deriv[INDEX_DX0] = -d_ddx * Sx;
				deriv[INDEX_DY0] = -d_ddy * Sy;
			}
		}
		return fxy;
	}
}
import java.util.ArrayList;
import java.util.HashMap;
import Jama.Matrix;
/**
**
** Correlation2dLMA - Fit multi - baseline correaltion pairs to the model
......@@ -52,6 +47,12 @@ import Jama.Matrix;
*
*/
import java.util.ArrayList;
import java.util.HashMap;
import Jama.Matrix;
public class Correlations2dLMA {
final static int X0_INDEX = 0;
......@@ -519,17 +520,11 @@ public class Correlations2dLMA {
}
if (debug_level > 0) {
System.out.println("LMA: full RMS="+last_rms[0]+" ("+initial_rms[0]+"), pure RMS="+last_rms[1]+" ("+initial_rms[1]+") + lambda="+lambda);
// System.out.println("LMA: full RMS="+good_or_bad_rms[0]+" ("+initial_rms[0]+"), pure RMS="+good_or_bad_rms[1]+" ("+initial_rms[1]+") + lambda="+lambda);
}
return rslt[0];
}
/*
double [] last_rms = null; // {rms, rms_pure}, matching this.vector
double [] initial_rms = null; // {rms, rms_pure}, first-calcualted rms
*/
// returns {success, done}
public boolean [] lmaStep(
......@@ -723,48 +718,4 @@ public class Correlations2dLMA {
return fx;
}
// public double [] getValues(xyvwh) {
// double [] values= new double [samples.size()];
// for (int i = 0; i < values.length; i++) values[i] = samples.get(i).v;
// return values;
// }
/*
*
if (debugLevel>-1) {
jtj.print(18, 6);
}
Matrix jtj_inv = jtj.inverse();
Matrix jty = jt.times(y_minus_fx_weighted);
Matrix mrslt = jtj_inv.times(jty);
double [] drslt = mrslt.getColumnPackedCopy();
*
* Fitting parabola for multiple grids
* Difference between ortho and diagonals - just point coordinates and extra overall scale (weight account for number averaged)
* Each group of compatible is already averaged, so each group has a single individual variable - scale.
*
* Parabolas (1 for each group) are Ag * (1 - ((x-x0)/Wx) ^ 2 - (y/Wy)^2), where As is a per-group scale
* Wy = Wm * scale +Wyd
* Wx = Wm * scale +Wyd + Wxy
*
* Wm is a correlation measurement parameter, it does not depend on x/y and on particular pair, it depends on the LPF, so the
* total contribution is proportional to the baseline reduction (scale)
*
* Wyd is widening caused the image itself - probably noise and other factors of poor correlation contrast. When multiple
* orthogonal directions are combined it influences equally all directions (x,y) so Wx includes that term also
*
* Wxy widens maximum in disparity direction, it is caused by multiple overlapping maximums for different disparities and for
* strong enough matches can indicate miz of disparities in the same tile
*
* Fitting of a single scale groups (1 or 2) has to have Wm constant.
*
*
*
*/
}
......@@ -117,8 +117,12 @@ public class ImageDtt {
static int BI_ASTR_DIAGM_INDEX = 21; //21 - strength for main diagonal pair of the aux camera
static int BI_ASTR_DIAGO_INDEX = 22; //22 - strength for main diagonal pair of the aux camera
static int BI_STR_CROSS_INDEX = 23; //23 - strength between the main the aux camera
static int BI_STR_ALL_INDEX = 24; //23 - average strength (product of strengths to 1/3 power), TODO: strength at cross disparity
static int BI_TARGET_INDEX = 25; //24 - target disparity
static int BI_STR_ALL_INDEX = 24; //24 - average strength (product of strengths to 1/3 power), TODO: strength at cross disparity
static int BI_TARGET_INDEX = 25; //25 - target disparity
static int BI_DBG1_INDEX = 26; //26 - debug layer 1
static int BI_DBG2_INDEX = 27; //27 - debug layer 2
static int BI_DBG3_INDEX = 28; //28 - debug layer 2
static int BI_DBG4_INDEX = 29; //29 - debug layer 2
static String [] BIDISPARITY_TITLES = {
"disparity","disp_hor","disp_vert","disp_diagm","disp_diago",
......@@ -126,7 +130,7 @@ public class ImageDtt {
"bi-disparity","bi-disparity-dx","bi-disparity-dy",
"strength", "str_hor", "str_vert", "str_diagm", "str_diago",
"astrength", "astr_hor", "astr_vert", "astr_diagm", "astr_diago",
"bi-strength", "all-strength", "target"};
"bi-strength", "all-strength", "target", "dbg1", "dbg2", "dbg3", "dbg4"};
static int [] BIDISPARITY_STRENGTHS= {
BI_STR_FULL_INDEX, BI_STR_VERT_INDEX, BI_STR_DIAGM_INDEX, BI_STR_DIAGO_INDEX,
BI_ASTR_FULL_INDEX, BI_ASTR_HOR_INDEX, BI_ASTR_VERT_INDEX, BI_ASTR_DIAGM_INDEX,
......@@ -7175,6 +7179,7 @@ public class ImageDtt {
/**
* Calculate disparity and strength for a inter-camera phase correlation of a pair of quad-cameras
* @param clt_parameters various configuration parameters
* @param fatzero - phase correlation fat zero (higher - ~LPF)
* @param corr2d Instance of the 2d correlator class
* @param clt_data_tile_main aberration-corrected FD CLT data for one tile of the main quad camera [sub-camera][color][quadrant][index]
* @param clt_data_tile_aux aberration-corrected FD CLT data for one tile of the auxiliary quad camera [sub-camera][color][quadrant][index]
......@@ -7190,6 +7195,7 @@ public class ImageDtt {
*/
public double [] tileInterCamCorrs(
final EyesisCorrectionParameters.CLTParameters clt_parameters,
final double fatzero, // May use correlation fat zero from 2 different parameters - fat_zero and rig.ml_fatzero
final Correlation2d corr2d,
final double [][][][] clt_data_tile_main,
final double [][][][] clt_data_tile_aux,
......@@ -7207,7 +7213,7 @@ public class ImageDtt {
clt_data_tile_aux, // double [][][][] clt_data_tile_aux,
filter, // double [] lpf,
col_weights, // double [] col_weights,
clt_parameters.fat_zero); // double fat_zero)
fatzero); // double fat_zero)
double [] stripe_inter = corr2d. scaleRotateInterpoateSingleCorrelation(
inter_cam_corr, // double [] corr,
......@@ -7334,6 +7340,7 @@ public class ImageDtt {
* Calculate correlation/strength, start with center of mass (CM) for all available pairs, proceed with LMA
* if strength is sufficient. Calculate 4 directional correlation/strengths if requested and strong enough
* @param clt_parameters various configuration parameters
* @param fatzero phaase correlation fat zero (higher ~LPF)
* @param get4dirs request 4 directional correlations (horizontal, vertical main diagonal, other diagonal)
* @param corr2d Instance of the 2d correlator class
* @param clt_data aberration-corrected FD CLT data [camera][color][quadrant][index]
......@@ -7349,6 +7356,7 @@ public class ImageDtt {
*/
public double [] tileCorrs(
final EyesisCorrectionParameters.CLTParameters clt_parameters,
final double fatzero, // May use correlation fat zero from 2 different parameters - fat_zero and rig.ml_fatzero
final boolean get4dirs, // calculate disparity/strength for each of the 4 directions
final Correlation2d corr2d,
final double [][][][] clt_data,
......@@ -7373,7 +7381,7 @@ public class ImageDtt {
all_pairs, // int pairs_mask,
filter, // double [] lpf,
col_weights, // double [] col_weights,
clt_parameters.fat_zero); // double fat_zero)
fatzero); // double fat_zero)
// calculate interpolated "strips" to match different scales and orientations (ortho/diagonal) on the
// fine (0.5 pix) grid. ortho for scale == 1 provide even/even samples (1/4 of all), diagonal ones -
......@@ -7696,6 +7704,7 @@ public class ImageDtt {
public double [][][][][][][] clt_bi_quad(
final EyesisCorrectionParameters.CLTParameters clt_parameters,
final double fatzero, // May use correlation fat zero from 2 different parameters - fat_zero and rig.ml_fatzero
final int [][] tile_op, // [tilesY][tilesX] - what to do - 0 - nothing for this tile
final double [][] disparity_array, // [tilesY][tilesX] - individual per-tile expected disparity
final double [][][] image_data_main, // first index - number of image in a quad
......@@ -7833,7 +7842,7 @@ public class ImageDtt {
System.out.println("max_corr_radius= "+clt_parameters.max_corr_radius);
System.out.println("max_search_radius= "+max_search_radius);
System.out.println("max_search_radius_poly="+max_search_radius_poly);
System.out.println("corr_fat_zero= "+clt_parameters.fat_zero);
System.out.println("corr_fat_zero= "+fatzero);
System.out.println("disparity_array[0][0]= "+disparity_array[0][0]);
......@@ -8112,6 +8121,7 @@ public class ImageDtt {
double [] tile_corrs_main = tileCorrs(
clt_parameters, // final EyesisCorrectionParameters.CLTParameters clt_parameters,
fatzero, // final double fatzero, // May use correlation fat zero from 2 different parameters - fat_zero and rig.ml_fatzero
true, // final boolean get4dirs, // calculate disparity/strength for each of the 4 directions
corr2d, // final Correlation2d corr2d,
clt_data_main, // final double [][][][] clt_data,
......@@ -8125,6 +8135,7 @@ public class ImageDtt {
double [] tile_corrs_aux = tileCorrs(
clt_parameters, // final EyesisCorrectionParameters.CLTParameters clt_parameters,
fatzero, // final double fatzero, // May use correlation fat zero from 2 different parameters - fat_zero and rig.ml_fatzero
true, // final boolean get4dirs, // calculate disparity/strength for each of the 4 directions
corr2d, // final Correlation2d corr2d,
clt_data_aux, // final double [][][][] clt_data,
......@@ -8162,6 +8173,7 @@ public class ImageDtt {
double [] inter_corrs_dxy = tileInterCamCorrs(
clt_parameters, // final EyesisCorrectionParameters.CLTParameters clt_parameters,
fatzero, // final double fatzero, // May use correlation fat zero from 2 different parameters - fat_zero and rig.ml_fatzero
corr2d, // final Correlation2d corr2d,
clt_data_main, // double [][][][] clt_data_tile_main,
clt_data_aux, // double [][][][] clt_data_tile_aux,
......@@ -8240,6 +8252,8 @@ public class ImageDtt {
if (ml_data_dbg1 != null) {
tileInterCamCorrs(
clt_parameters, // final EyesisCorrectionParameters.CLTParameters clt_parameters,
fatzero, // final double fatzero, // May use correlation fat zero from 2 different parameters - fat_zero and rig.ml_fatzero
corr2d, // final Correlation2d corr2d,
clt_data_main, // double [][][][] clt_data_tile_main,
clt_data_main, // double [][][][] clt_data_tile_aux,
......@@ -8335,6 +8349,7 @@ public class ImageDtt {
public void clt_bi_macro(
final EyesisCorrectionParameters.CLTParameters clt_parameters,
final double fatzero, // May use correlation fat zero from 2 different parameters - fat_zero and rig.ml_fatzero
final int macro_scale,
final int [][] tile_op, // [tilesY][tilesX] - what to do - 0 - nothing for this tile
final double [][] disparity_array, // [tilesY][tilesX] - individual per-tile expected disparity
......@@ -8616,6 +8631,7 @@ public class ImageDtt {
double [] inter_corrs_dxy = tileInterCamCorrs(
clt_parameters, // final EyesisCorrectionParameters.CLTParameters clt_parameters,
fatzero, // final double fatzero, // May use correlation fat zero from 2 different parameters - fat_zero and rig.ml_fatzero
corr2d, // final Correlation2d corr2d,
clt_data_main, // double [][][][] clt_data_tile_main,
clt_data_aux, // double [][][][] clt_data_tile_aux,
......
......@@ -7,7 +7,7 @@ import java.util.Arrays;
** Copyright (C) 2017 Elphel, Inc.
**
** -----------------------------------------------------------------------------**
**
**
** TileNeibs.java is free software: you can redistribute it and/or modify
** it under the terms of the GNU General Public License as published by
** the Free Software Foundation, either version 3 of the License, or
......@@ -36,37 +36,37 @@ public class TileNeibs{
this.sizeX = sizeX;
this.sizeY = sizeY;
}
public int numNeibs() // TODO: make configurable to
public int numNeibs() // TODO: make configurable to
{
return dirs;
}
/**
 * Direction opposite to the given one (rotated by half a turn).
 * @param dir direction index, 0 .. dirs-1 (CW from up)
 * @return index of the opposite direction
 */
public int opposite(int dir){
	int half = dirs / 2;
	return (dir + half) % dirs;
}
/**
 * Total number of elements in the tile grid.
 * @return sizeX * sizeY
 */
int getLength(){
	return sizeY * sizeX;
}
/**
 * Convert a linescan element index into its {x, y} coordinates.
 * @param indx element index
 * @return two-element array {x, y}
 */
int [] getXY(int indx)
{
	int x = indx % sizeX;
	int y = indx / sizeX;
	return new int [] {x, y};
}
/**
* Get element index from x and y
* @param x horizontal position
* @param y vertical position
* @return element linescan index
*/
int getIndex(int x, int y){
if ((x < 0) || (y < 0) || (x >= sizeX) || (y >= sizeY)) return -1;
return y * sizeX + x;
......@@ -78,10 +78,25 @@ public class TileNeibs{
}
/**
 * Get the 2d element index after shifting by (dx, dy). Returns -1 if the step leaves the array.
 * @param indx start index
 * @param dx offset in x direction
 * @param dy offset in y direction
 * @return new index or -1 if outside the array
 */
int getNeibIndex(int indx, int dx, int dy) {
	int newX = indx % sizeX + dx;
	int newY = indx / sizeX + dy;
	boolean outside = (newX < 0) || (newX >= sizeX) || (newY < 0) || (newY >= sizeY);
	return outside ? -1 : (newY * sizeX + newX);
}
/**
* Get 2d element index after step N, NE, ... NW. Returns -1 if leaving array
* @param indx start index
* @param dir step direction (CW from up)
* @return new index or -1 if leaving
* @return new index or -1 if leaving
*/
int getNeibIndex(int indx, int dir)
{
......@@ -93,23 +108,23 @@ public class TileNeibs{
}
// switch (dir % dirs){
switch (dir){
case 0: return (y == 0) ? -1 : (indx - sizeX);
case 1: return ((y == 0) || ( x == (sizeX - 1))) ? -1 : (indx - sizeX + 1);
case 2: return ( ( x == (sizeX - 1))) ? -1 : (indx + 1);
case 3: return ((y == (sizeY - 1)) || ( x == (sizeX - 1))) ? -1 : (indx + sizeX + 1);
case 4: return ((y == (sizeY - 1)) ) ? -1 : (indx + sizeX);
case 5: return ((y == (sizeY - 1)) || ( x == 0)) ? -1 : (indx + sizeX - 1);
case 6: return ( ( x == 0)) ? -1 : (indx - 1);
case 7: return ((y == 0) || ( x == 0)) ? -1 : (indx - sizeX - 1);
case 0: return (y == 0) ? -1 : (indx - sizeX);
case 1: return ((y == 0) || ( x == (sizeX - 1))) ? -1 : (indx - sizeX + 1);
case 2: return ( ( x == (sizeX - 1))) ? -1 : (indx + 1);
case 3: return ((y == (sizeY - 1)) || ( x == (sizeX - 1))) ? -1 : (indx + sizeX + 1);
case 4: return ((y == (sizeY - 1)) ) ? -1 : (indx + sizeX);
case 5: return ((y == (sizeY - 1)) || ( x == 0)) ? -1 : (indx + sizeX - 1);
case 6: return ( ( x == 0)) ? -1 : (indx - 1);
case 7: return ((y == 0) || ( x == 0)) ? -1 : (indx - sizeX - 1);
default: return indx;
}
}
/**
* Get 2d element index after step N, NE, ... NW. Returns -1 if leaving array
* And 2 steps for dir = 8(N), 9(NNE),..23(NNW)
* And 2 steps for dir = 8(N), 9(NNE),..23(NNW)
* @param indx start index
* @param dir step direction (CW from up)
* @return new index or -1 if leaving
* @return new index or -1 if leaving
*/
int getNeibIndex2(int indx, int dir)
{
......@@ -121,37 +136,37 @@ public class TileNeibs{
}
// switch (dir % dirs){
switch (dir){
case 0: return (y == 0) ? -1 : (indx - sizeX);
case 1: return ((y == 0) || ( x == (sizeX - 1))) ? -1 : (indx - sizeX + 1);
case 2: return ( ( x == (sizeX - 1))) ? -1 : (indx + 1);
case 3: return ((y == (sizeY - 1)) || ( x == (sizeX - 1))) ? -1 : (indx + sizeX + 1);
case 4: return ((y == (sizeY - 1)) ) ? -1 : (indx + sizeX);
case 5: return ((y == (sizeY - 1)) || ( x == 0)) ? -1 : (indx + sizeX - 1);
case 6: return ( ( x == 0)) ? -1 : (indx - 1);
case 0: return (y == 0) ? -1 : (indx - sizeX);
case 1: return ((y == 0) || ( x == (sizeX - 1))) ? -1 : (indx - sizeX + 1);
case 2: return ( ( x == (sizeX - 1))) ? -1 : (indx + 1);
case 3: return ((y == (sizeY - 1)) || ( x == (sizeX - 1))) ? -1 : (indx + sizeX + 1);
case 4: return ((y == (sizeY - 1)) ) ? -1 : (indx + sizeX);
case 5: return ((y == (sizeY - 1)) || ( x == 0)) ? -1 : (indx + sizeX - 1);
case 6: return ( ( x == 0)) ? -1 : (indx - 1);
case 7: return ((y == 0) || ( x == 0)) ? -1 : (indx - sizeX - 1);
case 8: return ( y < 2) ? -1 : (indx - 2 * sizeX);
case 9: return ((y < 2) || ( x > (sizeX - 2))) ? -1 : (indx - 2 * sizeX + 1);
case 10: return ((y < 2) || ( x > (sizeX - 3))) ? -1 : (indx - 2 * sizeX + 2);
case 11: return ((y < 1) || ( x > (sizeX - 3))) ? -1 : (indx - 1 * sizeX + 2);
case 12: return ( ( x > (sizeX - 3))) ? -1 : (indx + 2);
case 13: return ((y > (sizeY - 2)) || ( x > (sizeX - 3))) ? -1 : (indx + 1 * sizeX + 2);
case 14: return ((y > (sizeY - 3)) || ( x > (sizeX - 3))) ? -1 : (indx + 2 * sizeX + 2);
case 15: return ((y > (sizeY - 3)) || ( x > (sizeX - 2))) ? -1 : (indx + 2 * sizeX + 1);
case 16: return ((y > (sizeY - 3)) ) ? -1 : (indx + 2 * sizeX);
case 17: return ((y > (sizeY - 3)) || ( x < 1)) ? -1 : (indx + 2 * sizeX - 1);
case 18: return ((y > (sizeY - 3)) || ( x < 2)) ? -1 : (indx + 2 * sizeX - 2);
case 19: return ((y > (sizeY - 2)) || ( x < 2)) ? -1 : (indx + 1 * sizeX - 2);
case 20: return ( ( x < 2)) ? -1 : (indx - 2);
case 21: return ((y < 1) || ( x < 2)) ? -1 : (indx - 1 * sizeX - 2);
case 22: return ((y < 2) || ( x < 2)) ? -1 : (indx - 2 * sizeX - 2);
case 23: return ((y < 2) || ( x < 1)) ? -1 : (indx - 2 * sizeX - 1);
case 8: return ( y < 2) ? -1 : (indx - 2 * sizeX);
case 9: return ((y < 2) || ( x > (sizeX - 2))) ? -1 : (indx - 2 * sizeX + 1);
case 10: return ((y < 2) || ( x > (sizeX - 3))) ? -1 : (indx - 2 * sizeX + 2);
case 11: return ((y < 1) || ( x > (sizeX - 3))) ? -1 : (indx - 1 * sizeX + 2);
case 12: return ( ( x > (sizeX - 3))) ? -1 : (indx + 2);
case 13: return ((y > (sizeY - 2)) || ( x > (sizeX - 3))) ? -1 : (indx + 1 * sizeX + 2);
case 14: return ((y > (sizeY - 3)) || ( x > (sizeX - 3))) ? -1 : (indx + 2 * sizeX + 2);
case 15: return ((y > (sizeY - 3)) || ( x > (sizeX - 2))) ? -1 : (indx + 2 * sizeX + 1);
case 16: return ((y > (sizeY - 3)) ) ? -1 : (indx + 2 * sizeX);
case 17: return ((y > (sizeY - 3)) || ( x < 1)) ? -1 : (indx + 2 * sizeX - 1);
case 18: return ((y > (sizeY - 3)) || ( x < 2)) ? -1 : (indx + 2 * sizeX - 2);
case 19: return ((y > (sizeY - 2)) || ( x < 2)) ? -1 : (indx + 1 * sizeX - 2);
case 20: return ( ( x < 2)) ? -1 : (indx - 2);
case 21: return ((y < 1) || ( x < 2)) ? -1 : (indx - 1 * sizeX - 2);
case 22: return ((y < 2) || ( x < 2)) ? -1 : (indx - 2 * sizeX - 2);
case 23: return ((y < 2) || ( x < 1)) ? -1 : (indx - 2 * sizeX - 1);
default: return indx;
}
}
/**
* Return tile segment for 50% overlap. -1 - center, 0 N, 1 - NE,... 7 - NW
* @param indx element index
......@@ -210,7 +225,7 @@ public class TileNeibs{
{ 1,-1},
{ 0,-1},
{-1,-1}};
int dx = dxy[segm1 + 1][1] - dxy[segm + 1][1];
int dx = dxy[segm1 + 1][1] - dxy[segm + 1][1];
int dy = dxy[segm1 + 1][0] - dxy[segm + 1][0];
for (int dp1 = 0; dp1 <=8; dp1++) {
int sdx = (dx > 0) ? 1: ( (dx < 0) ? -1 : 0);
......@@ -234,7 +249,7 @@ public class TileNeibs{
prohibit);
for (int i = 0; i < tiles.length; i++) tiles[i] = !itiles[i];
}
public void growSelection(
int grow, // grow tile selection by 1 over non-background tiles 1: 4 directions, 2 - 8 directions, 3 - 8 by 1, 4 by 1 more
boolean [] tiles,
......@@ -255,7 +270,7 @@ public class TileNeibs{
if (!tiles[tindx + 1] && src_tiles[tindx]){
num_new++;
}
tiles[tindx + 1] |= src_tiles[tindx];
tiles[tindx + 1] |= src_tiles[tindx];
}
}
......@@ -299,14 +314,14 @@ public class TileNeibs{
}
}
}
public boolean [] boundShape(
boolean [] selection,
boolean octo)
{
boolean [] bound_shape = new boolean [selection.length];
int min_x=-1, max_x=-1, min_y=-1, max_y=-1;
int min_s=-1, max_s=-1, min_d=-1, max_d=-1;
int min_x=-1, max_x=-1, min_y=-1, max_y=-1;
int min_s=-1, max_s=-1, min_d=-1, max_d=-1;
boolean is_set = false;
for (int i = 0; i < selection.length; i++) if (selection[i]){
int [] xy = getXY(i);
......@@ -325,39 +340,39 @@ public class TileNeibs{
is_set = true;
} else {
if (xy[0] < min_x) min_x = xy[0];
else if (xy[0] > max_x) max_x = xy[0];
else if (xy[0] > max_x) max_x = xy[0];
if (xy[1] < min_y) min_y = xy[1];
else if (xy[1] > max_y) max_y = xy[1];
if (octo) {
if (sd[0] < min_s) min_s = sd[0];
else if (sd[0] > max_s) max_s = sd[0];
else if (sd[0] > max_s) max_s = sd[0];
if (sd[1] < min_d) min_d = sd[1];
else if (sd[1] > max_d) max_d = sd[1];
}
}
}
for (int y = min_y; y <= max_y; y++){
for (int x = min_x; x <= max_x; x++){
if (!octo ||
(((x + y) >= min_s) && ((x + y) <= max_s) && ((x - y) >= min_d) && ((x - y) <= max_d))) {
bound_shape[getIndex(x,y)] = true;
bound_shape[getIndex(x,y)] = true;
}
}
}
return bound_shape;
}
/**
* Enumerate clusters on rectangular area
* @param tiles selected tiles, size should be sizeX * sizeY
* @param ordered if true, order tiles from largest to smallest
* @return integer array, where 0 is unused, 1+ cluster it belongs to
*/
public int [] enumerateClusters(
boolean [] tiles,
boolean ordered)
......@@ -394,7 +409,7 @@ public class TileNeibs{
if (!ordered) {
return enum_clust;
}
// count cluster
int []clustSizes = new int [numClust];
for (int i = 0; i < clustSizes.length; i++) clustSizes[i] = 0;
......@@ -422,9 +437,9 @@ public class TileNeibs{
for (int i = 0; i < revIndex.length; i++) revIndex [pairs[i].index] = (numClust - i); // array was in accending order
int [] enum_clust_ordered = new int[tiles.length];
for (int i=0; i < enum_clust_ordered.length; i++){
enum_clust_ordered[i] = (enum_clust[i] > 0) ? revIndex[enum_clust[i] - 1] : 0;
enum_clust_ordered[i] = (enum_clust[i] > 0) ? revIndex[enum_clust[i] - 1] : 0;
}
return enum_clust_ordered;
return enum_clust_ordered;
}
public int getMax(
int [] data)
......
import java.util.ArrayList;
import ij.IJ;
import ij.ImagePlus;
import ij.ImageStack;
import ij.Prefs;
import ij.io.FileSaver;
/**
** TwoQuadCLT - Process images from a pair of Quad/Octal cameras
**
......@@ -28,6 +20,13 @@ import ij.io.FileSaver;
** -----------------------------------------------------------------------------**
**
*/
import java.util.ArrayList;
import ij.IJ;
import ij.ImagePlus;
import ij.ImageStack;
import ij.Prefs;
import ij.io.FileSaver;
public class TwoQuadCLT {
public long startTime; // start of batch processing
......@@ -339,6 +338,7 @@ public class TwoQuadCLT {
final double [][][][][][][] clt_bidata = // new double[2][quad][nChn][tilesY][tilesX][][]; // first index - main/aux
image_dtt.clt_bi_quad (
clt_parameters, // final EyesisCorrectionParameters.CLTParameters clt_parameters,
clt_parameters.fat_zero, // final double fatzero, // May use correlation fat zero from 2 different parameters - fat_zero and rig.ml_fatzero
tile_op_main, // final int [][] tile_op_main, // [tilesY][tilesX] - what to do - 0 - nothing for this tile
// tile_op_aux, // final int [][] tile_op_aux, // [tilesY][tilesX] - what to do - 0 - nothing for this tile
disparity_array_main, // final double [][] disparity_array, // [tilesY][tilesX] - individual per-tile expected disparity
......@@ -913,6 +913,7 @@ public class TwoQuadCLT {
ImageDtt image_dtt = new ImageDtt();
image_dtt.clt_bi_macro(
clt_parameters, // final EyesisCorrectionParameters.CLTParameters clt_parameters,
clt_parameters.fat_zero, // final double fatzero, // May use correlation fat zero from 2 different parameters - fat_zero and rig.ml_fatzero
macro_scale, // final int macro_scale,
mtile_op, // final int [][] tile_op, // [tilesY][tilesX] - what to do - 0 - nothing for this tile
mdisparity_array, // final double [][] disparity_array, // [tilesY][tilesX] - individual per-tile expected disparity
......@@ -1191,6 +1192,7 @@ if (debugLevel > -100) return true; // temporarily !
final boolean updateStatus,
final int debugLevel) throws Exception
{
int refine_inter = 2; // 3; // 3 - dx, 2 - disparity
System.out.println("enhanceByRig()");
if ((quadCLT_main == null) || (quadCLT_aux == null)) {
System.out.println("QuadCLT instances are not initilaized");
......@@ -1249,6 +1251,7 @@ if (debugLevel > -100) return true; // temporarily !
}
//Re-measure background
final int tilesX = quadCLT_main.tp.getTilesX();
final int tilesY = quadCLT_main.tp.getTilesY();
double [][] disparity_bimap_infinity = measureNewRigDisparity(
quadCLT_main, // QuadCLT quadCLT_main, // tiles should be set
quadCLT_aux, // QuadCLT quadCLT_aux,
......@@ -1271,7 +1274,7 @@ if (debugLevel > -100) return true; // temporarily !
null, // boolean [] was_trusted,
disparity_bimap_infinity ); // double [][] bimap // current state of measurements
if (clt_parameters.show_map && (debugLevel > -2) && clt_parameters.rig.rig_mode_debug){
if (clt_parameters.show_map && (debugLevel > 0) && clt_parameters.rig.rig_mode_debug){
(new showDoubleFloatArrays()).showArrays(
disparity_bimap_infinity,
tilesX,
......@@ -1292,7 +1295,7 @@ if (debugLevel > -100) return true; // temporarily !
disparity_bimap_infinity, // double [][] src_bimap, // current state of measurements (or null for new measurement)
prev_bimap, // double [][] prev_bimap, // previous state of measurements or null
scale_bad, // double [] scale_bad,
2, // int refine_mode, // 0 - by main, 1 - by aux, 2 - by inter
refine_inter, // int refine_mode, // 0 - by main, 1 - by aux, 2 - by inter
false, // boolean keep_inf, // keep expected disparity 0.0 if it was so
0.0, // clt_parameters.rig.refine_min_strength , // double refine_min_strength, // do not refine weaker tiles
clt_parameters.rig.refine_tolerance , // double refine_tolerance, // do not refine if absolute disparity below
......@@ -1320,7 +1323,7 @@ if (debugLevel > -100) return true; // temporarily !
}
if (clt_parameters.show_map && (debugLevel > -2) && clt_parameters.rig.rig_mode_debug){
if (clt_parameters.show_map && (debugLevel > 0) && clt_parameters.rig.rig_mode_debug){
for (int layer = 0; layer < disparity_bimap_infinity.length; layer ++) if (disparity_bimap_infinity[layer] != null){
for (int nTile = 0; nTile < disparity_bimap_infinity[layer].length; nTile++) {
......@@ -1375,7 +1378,7 @@ if (debugLevel > -100) return true; // temporarily !
clt_parameters.rig.trusted_tolerance, // double trusted_tolerance,
null, // boolean [] was_trusted,
disparity_bimap); // double [][] bimap // current state of measurements
if (clt_parameters.show_map && (debugLevel > -2) && clt_parameters.rig.rig_mode_debug){
if (clt_parameters.show_map && (debugLevel > 0) && clt_parameters.rig.rig_mode_debug){
(new showDoubleFloatArrays()).showArrays(
disparity_bimap,
tilesX,
......@@ -1396,7 +1399,7 @@ if (debugLevel > -100) return true; // temporarily !
disparity_bimap, // double [][] src_bimap, // current state of measurements (or null for new measurement)
prev_bimap, // double [][] prev_bimap, // previous state of measurements or null
scale_bad, // double [] scale_bad,
2, // int refine_mode, // 0 - by main, 1 - by aux, 2 - by inter
refine_inter, // int refine_mode, // 0 - by main, 1 - by aux, 2 - by inter
false, // boolean keep_inf, // keep expected disparity 0.0 if it was so
0.0, // clt_parameters.rig.refine_min_strength , // double refine_min_strength, // do not refine weaker tiles
clt_parameters.rig.refine_tolerance , // double refine_tolerance, // do not refine if absolute disparity below
......@@ -1422,7 +1425,7 @@ if (debugLevel > -100) return true; // temporarily !
if (num_new[0] < clt_parameters.rig.min_new) break;
}
if (clt_parameters.show_map && (debugLevel > -2) && clt_parameters.rig.rig_mode_debug){
if (clt_parameters.show_map && (debugLevel > 0) && clt_parameters.rig.rig_mode_debug){
for (int layer = 0; layer < disparity_bimap.length; layer ++) if (disparity_bimap[layer] != null){
for (int nTile = 0; nTile < disparity_bimap[layer].length; nTile++) {
......@@ -1468,7 +1471,7 @@ if (debugLevel > -100) return true; // temporarily !
}
}
if (clt_parameters.show_map && (debugLevel > -2) && clt_parameters.rig.rig_mode_debug){
if (clt_parameters.show_map && (debugLevel > 0) && clt_parameters.rig.rig_mode_debug){
(new showDoubleFloatArrays()).showArrays(
disparity_bimap,
tilesX,
......@@ -1495,7 +1498,105 @@ if (debugLevel > -100) return true; // temporarily !
ImageDtt.BIDISPARITY_TITLES);
}
// grow around using all camera and inter-camera correlations (try to get low-textured,like our street)
// repeat multiple times (need to compare added tiles; after cleanup - return number of tiles after clean-up and compare improvements)
int num_trusted = 0;
// final int tilesX = quadCLT_main.tp.getTilesX();
// final int tilesY = quadCLT_main.tp.getTilesY();
// grow around using all camera and inter-camera correlations (try to get low-textured,like our street pavement)
BiCamDSI biCamDSI = new BiCamDSI( tilesX, tilesY);
int min_added_tiles = 100;
for (int num_fill = 0; num_fill < 10; num_fill++) {
int num_new_trusted = biCamDSI.removeLTUntrusted(
disparity_bimap, // double [][] disparity_bimap,
clt_parameters.rig.lt_min_disparity, // double min_disparity, // = 0.0; // apply low texture to near objects
clt_parameters.rig.lt_trusted_strength, // double trusted_strength, // = 0.2; // strength sufficient without neighbors
clt_parameters.rig.lt_need_friends, // double need_friends, // = 0.4; // strength sufficient with neighbors support, fraction of lt_trusted_strength
clt_parameters.rig.lt_friends_diff, // double friends_diff, // = 0.2; // pix difference to neighbors to be considered a match (TODO: use tilted)
clt_parameters.rig.lt_friends_rdiff, // double friends_rdiff, // = 0.04; // additional relative pix per pixel of disparity
clt_parameters.rig.lt_min_friends_any, // int min_friends_any, // = 2; // minimal number of even weak friends
clt_parameters.rig.lt_min_friends_trusted, // int min_friends_trusted, // = 2; // minimal number of trusted (strong or already confirmed)
clt_parameters.rig.lt_friends_dist, // int friends_dist, // = 3; // how far to look for friends
debugLevel); // int debugLevel
if ((num_new_trusted - num_trusted) < min_added_tiles) {
if (debugLevel > -2) {
System.out.println("enhanceByRig(): pass="+num_fill+", number of added tiles = "+(num_new_trusted - num_trusted)+" < " +min_added_tiles+", done adding");
break;
}
} else {
if (debugLevel > -2) {
System.out.println("enhanceByRig(): pass="+num_fill+", number of added tiles = "+(num_new_trusted - num_trusted));
}
}
num_trusted = num_new_trusted;
disparity_bimap = fillPoorTextureByInter(
quadCLT_main, // QuadCLT quadCLT_main, // tiles should be set
quadCLT_aux, // QuadCLT quadCLT_aux,
clt_parameters, // EyesisCorrectionParameters.CLTParameters clt_parameters,
disparity_bimap, //double [][] disparity_bimap,
biCamDSI, // BiCamDSI biCamDSI,
threadsMax, // final int threadsMax, // maximal number of threads to launch
updateStatus, // final boolean updateStatus,
debugLevel-2); // final int debugLevel)// throws Exception
if (clt_parameters.show_map && (debugLevel > -2) && clt_parameters.rig.rig_mode_debug){ //OK
(new showDoubleFloatArrays()).showArrays(
disparity_bimap,
tilesX,
disparity_bimap[0].length/tilesX,
true,
quadCLT_main.image_name+"DSI_LT-N"+num_fill,
ImageDtt.BIDISPARITY_TITLES);
/*
for (int layer = 0; layer < disparity_bimap.length; layer ++) if (disparity_bimap[layer] != null){
for (int nTile = 0; nTile < disparity_bimap[layer].length; nTile++) {
if (!trusted_near[nTile]) disparity_bimap[layer][nTile] = Double.NaN;
}
}
for (int layer:ImageDtt.BIDISPARITY_STRENGTHS) if (disparity_bimap[layer] != null){
for (int nTile = 0; nTile < disparity_bimap[layer].length; nTile++) {
if (!trusted_near[nTile]) disparity_bimap[layer][nTile] = 0.0;
}
}
(new showDoubleFloatArrays()).showArrays( //wrong
disparity_bimap,
tilesX,
disparity_bimap[0].length/tilesX,
true,
quadCLT_main.image_name+"-DSI-LT-TRUSTED-N"+num_fill,
ImageDtt.BIDISPARITY_TITLES);
*/
}
}
int num_new_trusted = biCamDSI.removeLTUntrusted(
disparity_bimap, // double [][] disparity_bimap,
clt_parameters.rig.lt_min_disparity, // double min_disparity, // = 0.0; // apply low texture to near objects
clt_parameters.rig.lt_trusted_strength, // double trusted_strength, // = 0.2; // strength sufficient without neighbors
clt_parameters.rig.lt_need_friends, // double need_friends, // = 0.4; // strength sufficient with neighbors support, fraction of lt_trusted_strength
clt_parameters.rig.lt_friends_diff, // double friends_diff, // = 0.2; // pix difference to neighbors to be considered a match (TODO: use tilted)
clt_parameters.rig.lt_friends_rdiff, // double friends_rdiff, // = 0.04; // additional relative pix per pixel of disparity
clt_parameters.rig.lt_min_friends_any, // int min_friends_any, // = 2; // minimal number of even weak friends
clt_parameters.rig.lt_min_friends_trusted, // int min_friends_trusted, // = 2; // minimal number of trusted (strong or already confirmed)
clt_parameters.rig.lt_friends_dist, // int friends_dist, // = 3; // how far to look for friends
debugLevel); // int debugLevel
if (clt_parameters.show_map && (debugLevel > -2) && clt_parameters.rig.rig_mode_debug){ //OK
System.out.println("There are total "+num_new_trusted+" trusted tiles in ground truthe data");
(new showDoubleFloatArrays()).showArrays(
disparity_bimap,
tilesX,
disparity_bimap[0].length/tilesX,
true,
quadCLT_main.image_name+"DSI_LT-FINAL",
ImageDtt.BIDISPARITY_TITLES);
}
// if (debugLevel > -100) return null; // temporarily
// re-measure with ML data output
// int ml_hwidth = 2; // move to clt_parameters
......@@ -1521,6 +1622,7 @@ if (debugLevel > -100) return true; // temporarily !
disparity_bimap, // double [][] src_bimap,
clt_parameters, // EyesisCorrectionParameters.CLTParameters clt_parameters,
clt_parameters.rig.ml_hwidth, // int ml_hwidth
clt_parameters.rig.ml_fatzero, // double fatzero,
threadsMax, // final int threadsMax, // maximal number of threads to launch
updateStatus, // final boolean updateStatus,
debugLevel); // final int debugLevel);
......@@ -1538,6 +1640,7 @@ if (debugLevel > -100) return true; // temporarily !
clt_parameters.rig.ml_keep_hor_vert, // boolean keep_hor_vert,
clt_parameters.rig.ml_keep_tbrl, // boolean ml_keep_tbrl,
clt_parameters.rig.ml_keep_debug, // boolean keep_debug,
clt_parameters.rig.ml_fatzero, // double ml_fatzero,
clt_parameters.rig.ml_hwidth, // int ml_hwidth,
ml_data, // double [][] ml_data,
clt_parameters.rig.ml_show_ml, // boolean show,
......@@ -1547,6 +1650,144 @@ if (debugLevel > -100) return true; // temporarily !
//clt_3d_passes
}
/**
 * Try to fill low-textured areas (such as street pavement) by measuring tiles suggested
 * from the already-trusted disparity data, refining them with the inter-camera correlation,
 * and merging the trusted new results back into the rig measurement data.
 * Fixes an NPE hazard: the merge loop previously tested {@code disparity_bimap != null}
 * (the outer array, always non-null here) instead of the individual layer
 * {@code disparity_bimap[i] != null}, so a null layer would crash the merge.
 * @param quadCLT_main main camera QuadCLT instance (tiles should be set)
 * @param quadCLT_aux auxiliary camera QuadCLT instance
 * @param clt_parameters various configuration parameters
 * @param disparity_bimap current state of the rig measurements, updated in place
 * @param biCamDSI BiCamDSI instance created for this tile grid
 * @param threadsMax maximal number of threads to launch
 * @param updateStatus update IJ status bar
 * @param debugLevel debug level
 * @return updated disparity_bimap with the new trusted low-texture tiles merged in
 */
public double [][] fillPoorTextureByInter(
		QuadCLT quadCLT_main, // tiles should be set
		QuadCLT quadCLT_aux,
		EyesisCorrectionParameters.CLTParameters clt_parameters,
		double [][] disparity_bimap,
		BiCamDSI biCamDSI,
		final int threadsMax, // maximal number of threads to launch
		final boolean updateStatus,
		final int debugLevel)// throws Exception
{
	final int refine_inter = 2; // use inter-cam disparity for refinement
	final int tilesX = quadCLT_main.tp.getTilesX();
	final int tilesY = quadCLT_main.tp.getTilesY();
	// Suggest disparity values for low-textured tiles around the known trusted ones
	// (grow around using all camera and inter-camera correlations).
	double [] suggestedLTMeasurements = biCamDSI.suggestLTTiles(
			disparity_bimap, // double [][] disparity_bimap,
			null, // boolean [] trusted, // may be null if disparity is already NaN-ed
			clt_parameters.rig.lt_min_disparity, // double min_disparity, // = 0.0; // apply low texture to near objects
			clt_parameters.rig.lt_trusted_strength, // double trusted_strength, // = 0.2; // strength sufficient without neighbors
			clt_parameters.rig.lt_need_friends, // double need_friends, // = 0.4; // strength sufficient with neighbors support, fraction of lt_trusted_strength
			clt_parameters.rig.lt_extend_dist, // int extend_dist, // = 3; // how far to extend around known tiles (probably should increase this value up to?
			// dealing with neighbors variance
			clt_parameters.rig.lt_wsigma, // double wsigma, // = 1.0; // influence of far neighbors diminish as a Gaussian with this sigma
			clt_parameters.rig.lt_max_asigma, // double max_asigma, // = .15; // Maximal acceptable standard deviation of the neighbors (remove, then add)
			clt_parameters.rig.lt_max_rsigma, // double max_rsigma, // = .05; // Maximal acceptable standard deviation of the neighbors (remove, then add)
			debugLevel); // int debugLevel
	// Measure the suggested tiles (NaN entries are skipped).
	double [][] disparity_bimap_lt = setBimapFromDisparityNaN(
			suggestedLTMeasurements, // double [] disparity,
			quadCLT_main, // QuadCLT quadCLT_main, // tiles should be set
			quadCLT_aux, // QuadCLT quadCLT_aux,
			clt_parameters, // EyesisCorrectionParameters.CLTParameters clt_parameters,
			threadsMax, // final int threadsMax, // maximal number of threads to launch
			updateStatus, // final boolean updateStatus,
			debugLevel); // final int debugLevel);
	if (clt_parameters.show_map && (debugLevel > -2) && clt_parameters.rig.rig_mode_debug){
		(new showDoubleFloatArrays()).showArrays(
				disparity_bimap_lt,
				tilesX,
				disparity_bimap[0].length/tilesX,
				true,
				quadCLT_main.image_name+"NEW_LT_MEASURED",
				ImageDtt.BIDISPARITY_TITLES);
	}
	// Iteratively refine just the new suggested measurements using inter-camera correlation.
	double [][] prev_bimap = null;
	int [] num_new = new int[1];
	double [] scale_bad = new double [suggestedLTMeasurements.length];
	for (int i = 0; i < scale_bad.length; i++) scale_bad[i] = 1.0;
	boolean [] trusted_lt = null;
	for (int nref = 0; nref < clt_parameters.rig.num_near_refine; nref++) {
		double [][] disparity_bimap_new = refineRigSel(
				quadCLT_main, // QuadCLT quadCLT_main, // tiles should be set
				quadCLT_aux, // QuadCLT quadCLT_aux,
				disparity_bimap_lt, // double [][] src_bimap, // current state of measurements (or null for new measurement)
				prev_bimap, // double [][] prev_bimap, // previous state of measurements or null
				scale_bad, // double [] scale_bad,
				refine_inter, // int refine_mode, // 0 - by main, 1 - by aux, 2 - by inter
				false, // boolean keep_inf, // keep expected disparity 0.0 if it was so
				0.0, // clt_parameters.rig.refine_min_strength , // double refine_min_strength, // do not refine weaker tiles
				clt_parameters.rig.refine_tolerance , // double refine_tolerance, // do not refine if absolute disparity below
				trusted_lt, // tile_list, // ArrayList<Integer> tile_list, // or null
				num_new, // int [] num_new,
				clt_parameters, // EyesisCorrectionParameters.CLTParameters clt_parameters,
				threadsMax, // final int threadsMax, // maximal number of threads to launch
				updateStatus, // final boolean updateStatus,
				debugLevel); // final int debugLevel);
		prev_bimap = disparity_bimap_lt;
		disparity_bimap_lt = disparity_bimap_new;
		// Low texture may have very poor individual correlations, so do not require
		// per-camera strength at all here (min_inter_strength == 0.0).
		trusted_lt = getTrustedDisparityInter(
				0.0, // clt_parameters.rig.lt_trusted_strength*clt_parameters.rig.lt_need_friends, // double min_inter_strength, // check correlation strength combined for all 3 correlations
				clt_parameters.grow_disp_trust, // double max_trusted_disparity,
				trusted_lt, // boolean [] was_trusted,
				disparity_bimap_lt ); // double [][] bimap // current state of measurements
		if (debugLevel > -2) {
			System.out.println("enhanceByRig(): refined (lt) "+num_new[0]+" tiles");
		}
		if (num_new[0] < clt_parameters.rig.min_new) break;
	}
	// Final trust check with the real strength threshold.
	trusted_lt = getTrustedDisparityInter(
			clt_parameters.rig.lt_trusted_strength*clt_parameters.rig.lt_need_friends, // double min_inter_strength, // check correlation strength combined for all 3 correlations
			clt_parameters.grow_disp_trust, // double max_trusted_disparity,
			trusted_lt, // boolean [] was_trusted,
			disparity_bimap_lt ); // double [][] bimap // current state of measurements
	if (clt_parameters.show_map && (debugLevel > -2) && clt_parameters.rig.rig_mode_debug){
		(new showDoubleFloatArrays()).showArrays(
				disparity_bimap_lt,
				tilesX,
				disparity_bimap[0].length/tilesX,
				true,
				quadCLT_main.image_name+"NEW_LT_REFINED",
				ImageDtt.BIDISPARITY_TITLES);
	}
	// Combine the new measured results with the previously known ones (new overwrites old).
	// Start with unconditional use of the new trusted tiles; a strength comparison against
	// the old data may be reintroduced here later.
	for (int nTile = 0; nTile < disparity_bimap_lt[0].length; nTile++) {
		if (trusted_lt[nTile]) {
			for (int i = 0; i < disparity_bimap.length; i++) if (disparity_bimap[i] != null) { // was: disparity_bimap != null (NPE on a null layer)
				disparity_bimap[i][nTile] = disparity_bimap_lt[i][nTile];
			}
		}
	}
	if (clt_parameters.show_map && (debugLevel > -2) && clt_parameters.rig.rig_mode_debug){
		(new showDoubleFloatArrays()).showArrays(
				disparity_bimap_lt,
				tilesX,
				disparity_bimap[0].length/tilesX,
				true,
				quadCLT_main.image_name+"DSI_ADDED",
				ImageDtt.BIDISPARITY_TITLES);
	}
	return disparity_bimap;
}
public void saveMlFile(
String ml_title,
String ml_directory,
......@@ -1561,6 +1802,7 @@ if (debugLevel > -100) return true; // temporarily !
boolean keep_hor_vert,
boolean ml_keep_tbrl,
boolean keep_debug,
double ml_fatzero,
int ml_hwidth,
double [][] ml_data,
boolean show,
......@@ -1572,7 +1814,7 @@ if (debugLevel > -100) return true; // temporarily !
int width = tilesX * ml_width;
int height = tilesY * ml_width;
String title = ml_title+ (use8bpp?"08":"32")+"B-"+(keep_aux?"A":"")+(keep_inter?"I":"")+(keep_hor_vert?"O":"")+(ml_keep_tbrl?"T":"")+
(keep_debug?"D":"")+"-OFFS"+disp_offset;
(keep_debug?"D":"")+"-FZ"+ml_fatzero+"-OFFS"+disp_offset;
int [] aux_indices = {
ImageDtt.ML_TOP_AUX_INDEX, // 8 - top pair 2d correlation center area (auxiliary camera)
ImageDtt.ML_BOTTOM_AUX_INDEX, // 9 - bottom pair 2d correlation center area (auxiliary camera)
......@@ -1846,16 +2088,69 @@ if (debugLevel > -100) return true; // temporarily !
disparity_array, // double [][] disparity_array,
null, // double [][] ml_data, // data for ML - 10 layers - 4 center areas (3x3, 5x5,..) per camera-per direction, 1 - composite, and 1 with just 1 data (target disparity)
clt_parameters, // EyesisCorrectionParameters.CLTParameters clt_parameters,
clt_parameters.fat_zero, // double fatzero,
threadsMax, // final int threadsMax, // maximal number of threads to launch
updateStatus, // final boolean updateStatus,
debugLevel); // final int debugLevel);
return disparity_bimap;
}
/**
 * Perform a rig measurement from the new predicted disparity values.
 * Tiles whose predicted disparity is NaN are not measured.
 * @param disparity array of predicted disparities (linescan order), NaN - do not measure
 * @param quadCLT_main main camera QuadCLT instance (should have tp initialized)
 * @param quadCLT_aux auxiliary camera QuadCLT instance (should have tp initialized)
 * @param clt_parameters various configuration parameters
 * @param threadsMax maximal number of threads to use
 * @param updateStatus update IJ status bar
 * @param debugLevel debug level
 * @return rig measurement results
 */
public double [][] setBimapFromDisparityNaN(
		double [] disparity,
		QuadCLT quadCLT_main, // tiles should be set
		QuadCLT quadCLT_aux,
		EyesisCorrectionParameters.CLTParameters clt_parameters,
		final int threadsMax, // maximal number of threads to launch
		final boolean updateStatus,
		final int debugLevel){
	int tile_op_all = clt_parameters.tile_task_op; //FIXME Use some constant?
	final int tilesX = quadCLT_main.tp.getTilesX();
	final int tilesY = quadCLT_main.tp.getTilesY();
	int [][] tile_op = new int[tilesY][tilesX]; // common for both main and aux
	double [][] disparity_array = new double[tilesY][tilesX];
	// Mark only the tiles with a defined (non-NaN) predicted disparity for measurement.
	for (int nTile = 0; nTile < disparity.length; nTile++) {
		double d = disparity[nTile];
		if (Double.isNaN(d)) continue;
		int row = nTile / tilesX;
		int col = nTile % tilesX;
		tile_op[row][col] = tile_op_all;
		disparity_array[row][col] = d;
	}
	return measureRig(
			quadCLT_main, // QuadCLT quadCLT_main, // tiles should be set
			quadCLT_aux, // QuadCLT quadCLT_aux,
			tile_op, // int [][] tile_op, // common for both main and aux
			disparity_array, // double [][] disparity_array,
			null, // double [][] ml_data, // data for ML - 10 layers - 4 center areas (3x3, 5x5,..) per camera-per direction, 1 - composite, and 1 with just 1 data (target disparity)
			clt_parameters, // EyesisCorrectionParameters.CLTParameters clt_parameters,
			clt_parameters.fat_zero, // double fatzero,
			threadsMax, // final int threadsMax, // maximal number of threads to launch
			updateStatus, // final boolean updateStatus,
			debugLevel); // final int debugLevel);
}
/**
* Select tiles that have small enough residual disparity to trust on each of the two cameras and the whole rig.
* @param quadCLT_main main camera QuadCLT instance (should have tp initialized)
* @param quadCLT_aux auxiliary camera QuadCLT instance (should have tp initialized)
* @param min_combo_strength minimal combined correlation strength for each camera and the rig
* @param max_trusted_disparity maximal disparity on a rig to trust (currently 4.0 pixels from zero each way)
* @param trusted_tolerance allow other cameras with scaled disparity if their disparity is under this value (for small baselines)
* @param bimap measured data
......@@ -1885,6 +2180,28 @@ if (debugLevel > -100) return true; // temporarily !
}
return trusted;
}
/**
* Select tiles that have small enough residual disparity and sufficient strength to trust for the whole rig only
* @param min_inter_strength minimal inter-camera correlation strength
* @param max_trusted_disparity maximal disparity on a rig to trust (currently 4.0 pixels from zero each way)
* @param bimap measured data
* @return per-tile array of trusted tiles
*/
boolean [] getTrustedDisparityInter(
double min_inter_strength, // check correlation strength combined for all 3 correlations
double max_trusted_disparity,
boolean [] was_trusted,
double [][] bimap // current state of measurements
) {
double trusted_inter = max_trusted_disparity;
boolean [] trusted = new boolean [bimap[ImageDtt.BI_DISP_CROSS_INDEX].length];
for (int i = 0; i < trusted.length; i++) {
trusted[i] = (Math.abs(bimap[ImageDtt.BI_DISP_CROSS_INDEX][i]) <= trusted_inter) &&
(bimap[ImageDtt.BI_STR_CROSS_INDEX][i] >= min_inter_strength) &&
((was_trusted == null) || was_trusted[i]);
}
return trusted;
}
/**
* Refine (re-measure with updated expected disparity) tiles. If refine_min_strength and refine_tolerance are both
......@@ -2006,6 +2323,7 @@ if (debugLevel > -100) return true; // temporarily !
disparity_array, // double [][] disparity_array,
null, // double [][] ml_data, // data for ML - 10 layers - 4 center areas (3x3, 5x5,..) per camera-per direction, 1 - composite, and 1 with just 1 data (target disparity)
clt_parameters, // EyesisCorrectionParameters.CLTParameters clt_parameters,
clt_parameters.fat_zero, // double fatzero,
threadsMax, //final int threadsMax, // maximal number of threads to launch
updateStatus, // final boolean updateStatus,
debugLevel); // final int debugLevel)
......@@ -2086,6 +2404,7 @@ if (debugLevel > -100) return true; // temporarily !
disparity_array, // double [][] disparity_array,
null, // double [][] ml_data, // data for ML - 10 layers - 4 center areas (3x3, 5x5,..) per camera-per direction, 1 - composite, and 1 with just 1 data (target disparity)
clt_parameters, // EyesisCorrectionParameters.CLTParameters clt_parameters,
clt_parameters.fat_zero, // double fatzero,
threadsMax, //final int threadsMax, // maximal number of threads to launch
updateStatus, // final boolean updateStatus,
debugLevel); // final int debugLevel)
......@@ -2130,7 +2449,7 @@ if (debugLevel > -100) return true; // temporarily !
double [] scale_bad,
int [][] tile_op, // common for both amin and aux
double [][] disparity_array,
int refine_mode, // 0 - by main, 1 - by aux, 2 - by inter
int refine_mode, // 0 - by main, 1 - by aux, 2 - by inter, 3 - inter-dx
boolean keep_inf, // keep expected disparity 0.0 if it was so
double refine_min_strength, // do not refine weaker tiles
double refine_tolerance, // do not refine if absolute disparity below
......@@ -2168,11 +2487,17 @@ if (debugLevel > -100) return true; // temporarily !
strength = src_bimap[ImageDtt.BI_ASTR_FULL_INDEX][nTile];
disp_scale = disp_scale_aux;
break;
default:
case 2:
diff_disp = src_bimap[ImageDtt.BI_DISP_CROSS_INDEX][nTile];
diff_prev= (prev_bimap == null)? Double.NaN:prev_bimap[ImageDtt.BI_DISP_CROSS_INDEX][nTile];
strength = src_bimap[ImageDtt.BI_STR_CROSS_INDEX][nTile];
disp_scale = disp_scale_inter;
break;
default: // case 3
diff_disp = src_bimap[ImageDtt.BI_DISP_CROSS_DX_INDEX][nTile];
diff_prev= (prev_bimap == null)? Double.NaN:prev_bimap[ImageDtt.BI_DISP_CROSS_DX_INDEX][nTile];
strength = src_bimap[ImageDtt.BI_STR_CROSS_INDEX][nTile];
disp_scale = disp_scale_inter;
}
// strong enough?
if (strength < refine_min_strength) return false;
......@@ -2213,7 +2538,8 @@ if (debugLevel > -100) return true; // temporarily !
}
if (debug_this) System.out.println("prepRefineTile():target_diff "+ src_bimap[ImageDtt.BI_TARGET_INDEX][nTile]+","+diff_disp+","+scale_bad[nTile]);
if (Math.abs((new_disp - ref_target)/new_disp) < refine_tolerance) return false;
// if (Math.abs((new_disp - ref_target)/new_disp) < refine_tolerance) return false;
if (Math.abs(new_disp - ref_target) < refine_tolerance) return false;
disparity_array[tileY][tileX] = new_disp;
tile_op[tileY][tileX] = tile_op_all;
......@@ -2228,6 +2554,7 @@ if (debugLevel > -100) return true; // temporarily !
double [][] disparity_array,
double [][] ml_data, // data for ML - 10 layers - 4 center areas (3x3, 5x5,..) per camera-per direction, 1 - composite, and 1 with just 1 data (target disparity)
EyesisCorrectionParameters.CLTParameters clt_parameters,
double fatzero,
final int threadsMax, // maximal number of threads to launch
final boolean updateStatus,
final int debugLevel){
......@@ -2237,8 +2564,8 @@ if (debugLevel > -100) return true; // temporarily !
image_dtt.clt_bi_quad (
clt_parameters, // final EyesisCorrectionParameters.CLTParameters clt_parameters,
fatzero, // final double fatzero, // May use correlation fat zero from 2 different parameters - fat_zero and rig.ml_fatzero
tile_op, // final int [][] tile_op_main, // [tilesY][tilesX] - what to do - 0 - nothing for this tile
// tile_op, // final int [][] tile_op_aux, // [tilesY][tilesX] - what to do - 0 - nothing for this tile
disparity_array, // final double [][] disparity_array, // [tilesY][tilesX] - individual per-tile expected disparity
quadCLT_main.image_data, // final double [][][] image_data_main, // first index - number of image in a quad
quadCLT_aux.image_data, // final double [][][] image_data_aux, // first index - number of image in a quad
......@@ -2270,6 +2597,7 @@ if (debugLevel > -100) return true; // temporarily !
double [][] src_bimap,
EyesisCorrectionParameters.CLTParameters clt_parameters,
int ml_hwidth,
double fatzero,
final int threadsMax, // maximal number of threads to launch
final boolean updateStatus,
final int debugLevel){
......@@ -2335,6 +2663,7 @@ if (debugLevel > -100) return true; // temporarily !
disparity_array, // double [][] disparity_array,
ml_data, // double [][] ml_data, // data for ML - 10 layers - 4 center areas (3x3, 5x5,..) per camera-per direction, 1 - composite, and 1 with just 1 data (target disparity)
clt_parameters, // EyesisCorrectionParameters.CLTParameters clt_parameters,
fatzero, // double fatzero,
threadsMax, // maximal number of threads to launch // final int threadsMax, // maximal number of threads to launch
updateStatus, // final boolean updateStatus,
debugLevel); // final int debugLevel)
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment