Commit 640b8df3 authored by Andrey Filippov

Cleaned up pattern matching, implemented re-centering for correlations,

working snapshot
parent c8f99e6d
......@@ -293,6 +293,22 @@ public class DoubleFHT {
return result;
}
public double [] getFrequencyFilter (
double [] data, // just to find out size
double highPassSigma,
double lowPassSigma
) {
double [] filter = null;
if ((highPassSigma > 0.0) || (lowPassSigma > 0.0)) {
updateMaxN(data);
createFrequencyFilter(highPassSigma, lowPassSigma); // for repetitive calls will reuse mask
filter = this.freqMask;
}
return filter;
}
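// Usage sketch (illustrative, not part of this commit): the mask depends only on the data size
// and the two sigmas, so it can be requested once and then reused for every tile of the same
// size, e.g. (variable names assumed):
//   double [] lpf  = fht.getFrequencyFilter(tile, 0.0, lpf_sigma); // low-pass only
//   double [] corr = fht.convolve(tile, pattern, lpf);             // filtered convolution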
/**
* Invert kernel for deconvolutions. Kernel is assumed to fade near edges
* @param data source kernel, square, power of 2 sides. Data is destroyed
......@@ -314,7 +330,6 @@ public class DoubleFHT {
filter =this.freqMask;
}
return invert (data, fat_zero, filter);
}
/**
......@@ -401,9 +416,7 @@ public class DoubleFHT {
}
public double[] phaseCorrelate(double[] first, double phaseCoeff, double high_pass, double low_pass, // high/low
// pass
// filtering
double[] fht_save) { // null-OK
double[] fht_save) { // null-OK
updateMaxN(first);
double[] filter = null;
if ((high_pass > 0) || (low_pass > 0)) {
......@@ -503,7 +516,10 @@ public class DoubleFHT {
* @return phase correlation, first still contains original transformed!
*/
public double[] phaseCorrelatePattern( // new
double[] first, double[] secondFD, double phaseCoeff, double[] filter, // high/low pass filtering
double[] first,
double[] secondFD,
double phaseCoeff,
double[] filter, // high/low pass filtering
double[] first_save) { // null-OK
if (first.length != secondFD.length) {
IJ.showMessage("Error", "Correlation arrays should be the same size");
......@@ -537,7 +553,9 @@ public class DoubleFHT {
*/
public double[] convolvePattern( // new
double[] first, double[] secondFD, double[] filter, // high/low pass filtering
double[] first,
double[] secondFD,
double[] filter, // high/low pass filtering
double[] first_save) { // null-OK
if (first.length != secondFD.length) {
IJ.showMessage("Error", "Correlation arrays should be the same size");
......@@ -595,11 +613,16 @@ public class DoubleFHT {
return first;
}
public double[] convolve(double[] first, double[] second) {
public double[] convolve(
double[] first,
double[] second) {
return convolve(first, second, null);
}
public double[] convolve(double[] first, double[] second, double[] filter) { // high/low pass filtering
public double[] convolve(
double[] first,
double[] second,
double[] filter) { // high/low pass filtering
// System.out.println("correlate");
if (first.length != second.length) {
IJ.showMessage("Error", "Correlation arrays should be the same size");
......
......@@ -544,13 +544,13 @@ public class ComboMatch {
}
imp_obj.setRoi(roi);
// calculate statistics for cor_ret[i]
double [][] stats = new double [object_stack.length][];
CorrelationPeakStats [] stats = new CorrelationPeakStats [object_stack.length];
double search_rad = 15.0;
double frac_max = 0.5;
double other_rad = 25;
for (int i = 0; i < object_stack.length; i++) {
double [] a_cent= {corr_size/2+centers[i][0],corr_size/2+centers[i][1]};
stats[i]=ObjectLocation.getMaxLocArea(
stats[i]=new CorrelationPeakStats(
corr_ret[i], // double [] data, // square data
a_cent, // centers[i], // double [] cent_xy, // if null, use center of the square
search_rad, // double radius, // search for maximum within this radius
......@@ -567,7 +567,8 @@ public class ComboMatch {
ObjectLocation ol = object_list.get(i);
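// Printed columns: index, object pixel X/Y, peak value (best_d), effective radius, elongation,
// centroid distance from the expected center, centroid X/Y offset from the correlation tile center, object name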
System.out.println(String.format("%2d (%4d/%4d): %7.5f %7.3f %7.3f %7.3f %7.3f %7.3f %17s",
i, ol.getPixels()[0], ol.getPixels()[1],
stats[i][0],stats[i][1],stats[i][2],stats[i][3],stats[i][4]+centers[i][0],stats[i][5]+centers[i][1],
stats[i].best_d,stats[i].eff_rad,stats[i].elong,stats[i].dist,
stats[i].cent_offs[0]+centers[i][0],stats[i].cent_offs[1]+centers[i][1],
ol.getName()));
}
System.out.println();
......@@ -868,7 +869,8 @@ public class ComboMatch {
ImagePlus imp_pat_match = maps_collection.patternMatchDualWrap (
gpu_pair, // int [] indices, // null or which indices to use (normally just 2 for pairwise comparison)
affines, // double [][][] affines, // null or [indices.length][2][3]
null); // warp); // FineXYCorr warp)
null, // warp); // FineXYCorr warp)
null); // double [][] ground_planes) TODO: add calculation of the ground plane for single images
// imp_pat_match.show();
}
......@@ -895,27 +897,33 @@ public class ComboMatch {
boolean batch_mode = true; // false; // true;
boolean ignore_prev_rms = true;
Rectangle woi = new Rectangle(); // used to return actual woi from correlateOrthoPair()
double [][] ground_planes = null;
for (int zi = 0; zi < zooms.length; zi++) {
zoom_lev = zooms[zi];
if (zoom_lev >=1000) {
break;
}
boolean show_vf = render_match || pattern_match;
if (render_match || pattern_match) {
ground_planes = new double [gpu_pair.length][];
}
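// ground_planes[i] will be filled by correlateOrthoPair() below and passed to
// patternMatchDualWrap() when pattern matching is enabled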
// will modify affines[1], later add jtj, weight, smth. else?
FineXYCorr warp = maps_collection.correlateOrthoPair(
clt_parameters, // CLTParameters clt_parameters,
clt_parameters, // CLTParameters clt_parameters,
(process_correlation? pairwiseOrthoMatch: null), //PairwiseOrthoMatch pairwiseOrthoMatch, // will return statistics
frac_remove, // double frac_remove, // = 0.25
metric_error, // double metric_error,
frac_remove, // double frac_remove, // = 0.25
metric_error, // double metric_error,
ignore_prev_rms, // boolean ignore_prev_rms,
num_tries_fit, // int num_tries, // = 5
true, // boolean calc_warp,
batch_mode, // boolean batch_mode,
gpu_pair, // String [] gpu_spair,
affines, // double [][][] affines, // on top of GPS offsets
num_tries_fit, // int num_tries, // = 5
true, // boolean calc_warp,
batch_mode, // boolean batch_mode,
gpu_pair, // String [] gpu_spair,
affines, // double [][][] affines, // on top of GPS offsets
woi, // Rectangle woi,
zoom_lev, // int zoom_lev,
debugLevel); // final int debugLevel)
zoom_lev, // int zoom_lev,
show_vf, // boolean show_vf,
ground_planes, // double [][] ground_planes, // null or double[2] - will return ground planes
debugLevel); // final int debugLevel)
if (warp == null) {
System.out.println("Failed correlateOrthoPair()");
return false;
......@@ -950,8 +958,9 @@ public class ComboMatch {
if (pattern_match) {
ImagePlus imp_pat_match = maps_collection.patternMatchDualWrap (
gpu_pair, // int [] indices, // null or which indices to use (normally just 2 for pairwise comparison)
affines, // double [][][] affines, // null or [indices.length][2][3]
warp); // FineXYCorr warp)
affines, // double [][][] affines, // null or [indices.length][2][3]
warp, // FineXYCorr warp)
ground_planes); // double [][] ground_planes); // null or
// imp_pat_match.show();
}
if (render_match) {
......
package com.elphel.imagej.orthomosaic;
import java.util.Arrays;
import com.elphel.imagej.tileprocessor.TileNeibs;
import Jama.EigenvalueDecomposition;
import Jama.Matrix;
public class CorrelationPeakStats {
public double best_d;
public double eff_rad;
public double elong;
public double dist;
public double [] cent_offs;
public double max_other;
public CorrelationPeakStats(
double [] data, // square data
double [] cent_xy, // if null, use center of the square
double radius, // search for maximum within this radius
double frac_max,
double other_radius,
int debugLevel) {
double [] stats = getStatsNaN(
data, // square data
cent_xy, // if null, use center of the square
radius, // search for maximum within this radius
frac_max,
other_radius,
debugLevel);
best_d = stats[0];
eff_rad = stats[1];
elong = stats[2];
dist = stats[3];
cent_offs = new double [] {stats[4], stats[5]};
max_other = stats[6];
}
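// Centroid offset from the provided center, rounded to whole pixels (e.g. for re-centering
// the search window of a subsequent correlation pass)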
int [] getIntOffset() {
return new int[] {(int) Math.round(cent_offs[0]), (int) Math.round(cent_offs[1])};
}
/**
 * Search for the maximum of the correlation data within the specified radius around the
 * provided center (or around the center of the square if cent_xy is null) and measure the
 * peak shape from the connected pixels above frac_max of the maximum.
 * @param data square data
 * @param cent_xy center of the search area in pixels; if null, use the center of the square
 * @param radius search for the maximum within this radius
 * @param frac_max fraction of the maximum that defines the peak area used for the centroid and covariance
 * @param other_radius within this radius around the peak, measure the strongest pixel that is not
 *        connected to the peak area, as a fraction of the maximum (0 - skip this measurement)
 * @param debugLevel debug level; >1 prints the covariance matrix and its eigenvectors/eigenvalues
 * @return {max_value, effective_radius, elongation, distance, x_offset, y_offset, max_other},
 *         where x_offset, y_offset are the peak centroid offsets from the provided center and
 *         distance is their length. If no local maximum is found within the search area, all
 *         returned values are NaN (getStats() itself returns null in that case).
 */
public static double [] getStatsNaN(
double [] data, // square data
double [] cent_xy, // if null, use center of the square
double radius, // search for maximum within this radius
double frac_max,
double other_radius,
int debugLevel) {
double [] rslt = getStats(
data, // square data
cent_xy, // if null, use center of the square
radius, // search for maximum within this radius
frac_max,
other_radius,
debugLevel);
if (rslt != null) {
return rslt;
} else {
return new double[] {Double.NaN,Double.NaN,Double.NaN,Double.NaN,Double.NaN,Double.NaN,Double.NaN};
}
}
public static double [] getStats(
double [] data, // square data
double [] cent_xy, // if null, use center of the square
double radius, // search for maximum within this radius
double frac_max,
double other_radius, // may be 0 if not needed
int debugLevel) {
boolean debug = debugLevel > 1;
int size = (int) Math.sqrt(data.length);
if (cent_xy == null) {
cent_xy = new double [] {size/2, size/2};
}
double maxr2 = radius * radius;
double best_d = 0;
int best_indx = -1;
int min_x = Math.max(1, (int) Math.floor(cent_xy[0]-radius));
int min_y = Math.max(1, (int) Math.floor(cent_xy[1]-radius));
int max_x = Math.min(size-2, (int) Math.ceil(cent_xy[0]+radius));
int max_y = Math.min(size-2, (int) Math.ceil(cent_xy[1]+radius));
for (int y = min_y; y <= max_y; y++) { // do not search on very edges
double dy = (y-cent_xy[1]);
double y2 = dy*dy;
if (y2 < maxr2) {
for (int x = min_x; x <= max_x; x++) {
double dx = x - cent_xy[0];
double r2 = y2 + dx*dx;
if (r2 < maxr2) {
int indx = y * size + x;
double d = data[indx];
if (d > best_d) {
best_indx = indx;
best_d = d;
}
}
}
}
}
if (best_indx < 0) {
return null;
}
// is it local max?
if ((data[best_indx - 1] > best_d) || (data[best_indx + 1] > best_d) ||
(data[best_indx - size] > best_d) || (data[best_indx + size] > best_d)) {
return null; // on the edge, not a local max
}
boolean [] above_thresh = new boolean [data.length];
double thresh = best_d * frac_max;
for (int i = 0; i < data.length; i++) {
above_thresh[i] = data[i] > thresh;
}
int [] clusters = (new TileNeibs(size,size)).enumerateClusters(
above_thresh, // boolean [] tiles,
null, // int [] num_clusters,
false); // boolean ordered)
int center_cluster = clusters[best_indx];
int best_x = best_indx % size;
int best_y = best_indx / size;
double xc = best_x - cent_xy[0];
double yc = best_y - cent_xy[1];
double s0=0, sx=0, sy=0, sx2 = 0, sy2=0, sxy = 0;
for (int i = 0; i < clusters.length; i++) {
if (clusters[i] == center_cluster) {
double y = i / size - (yc + cent_xy[1]); //(yc + cent_xy[1]) - absolute, from (0,0)
double x = i % size - (xc + cent_xy[0]);
double w = data[i]-thresh;
s0 += w;
sx += w * x;
sy += w * y;
sx2 += w * x * x;
sy2 += w * y * y;
sxy += w * x * y;
}
}
double cxx = sx2 - sx * sx / s0, cyy= sy2 - sy * sy / s0, cxy = sxy - sx * sy / s0;
/*
* sum(Mi*(Xi-avg(X))^2) = SX2 - SX^2/S0
* sum(Mi*(Yi-avg(Y))^2) = SY2 - SY^2/S0
* sum(Mi*(Xi-avg(X))*(Yi-avg(Y))) = SXY - SX*SY / S0
*/
Matrix covar = new Matrix(new double[][] {{cxx, cxy},{cxy,cyy}});
double [] cent_offs = {sx/s0 + xc,sy/s0 + yc};
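// The eigenvalues of the weight-scaled covariance matrix give the spread of the peak along its
// principal axes: elongation is the square root of their ratio, the effective radius the fourth
// root of their product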
EigenvalueDecomposition eig = covar.eig();
double [] eigval = {eig.getD().get(0, 0),eig.getD().get(1, 1)};
Arrays.sort(eigval); // ascending
double elong = Math.sqrt(eigval[1]/eigval[0]);
double eff_rad = Math.sqrt(Math.sqrt(eigval[1]*eigval[0]));
double dist = Math.sqrt(cent_offs[0]*cent_offs[0] + cent_offs[1]*cent_offs[1]);
double max_other = 0.0;
if (other_radius > 0) { // only calculate if asked for
double or2 = other_radius*other_radius;
min_x = Math.max(1, (int) Math.floor(best_x-other_radius));
min_y = Math.max(1, (int) Math.floor(best_y-other_radius));
max_x = Math.min(size-2, (int) Math.ceil(best_x+other_radius));
max_y = Math.min(size-2, (int) Math.ceil(best_y+other_radius));
for (int y = min_y; y <= max_y; y++) { // do not search on very edges
double dy = (y-best_y);
double y2 = dy*dy;
if (y2 < or2) {
for (int x = min_x; x <= max_x; x++) {
double dx = x - best_x;
double r2 = y2 + dx*dx;
if (r2 < or2) {
int indx = y * size + x;
if (clusters[indx] != center_cluster) { // only disconnected from the central area
double d = data[indx];
if (d > max_other) {
max_other = d;
}
}
}
}
}
}
max_other /= best_d;
}
if (debug) {
System.out.println("\ncenter offset ["+(sx/s0)+","+(sy/s0)+"] , from center: ["+cent_offs[0]+","+cent_offs[1]+"]");
System.out.println("Covariance matrix:");
covar.print(8, 6);
System.out.println("eig.getV()");
eig.getV().print(8, 6);
System.out.println("eig.getD()");
eig.getD().print(8, 6);
System.out.println(String.format("best_d=%7.5f, rad=%6.3f, elong=%6.3f, dist=%6.3f, dx=%6.3f, dy=%6.3f, mo=%5.3f",
best_d, eff_rad, elong, dist, cent_offs[0], cent_offs[1], max_other));
}
return new double [] {best_d, eff_rad, elong, dist, cent_offs[0], cent_offs[1], max_other} ;
}
}
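A minimal usage sketch for the new class (illustrative only, not part of this commit; the 64x64
size, the synthetic peak position and the parameter values are arbitrary, the search/other radii
just mirror the values used in ComboMatch above):

    import com.elphel.imagej.orthomosaic.CorrelationPeakStats;

    public class PeakStatsDemo {
        public static void main(String[] args) {
            int size = 64;
            double [] data = new double [size * size];
            double x0 = 34.3, y0 = 30.6, sigma = 2.5; // synthetic peak slightly off the tile center
            for (int i = 0; i < data.length; i++) {
                double dx = (i % size) - x0;
                double dy = (i / size) - y0;
                data[i] = Math.exp(-(dx * dx + dy * dy) / (2 * sigma * sigma));
            }
            CorrelationPeakStats stats = new CorrelationPeakStats(
                    data, // double [] data, // square data
                    null, // double [] cent_xy, // null - use the center of the square
                    15.0, // double radius, // search for maximum within this radius
                    0.5,  // double frac_max,
                    25.0, // double other_radius,
                    0);   // int debugLevel
            System.out.println("best_d=" + stats.best_d + " eff_rad=" + stats.eff_rad +
                    " elong=" + stats.elong + " dist=" + stats.dist +
                    " offs=[" + stats.cent_offs[0] + "," + stats.cent_offs[1] + "]" +
                    " max_other=" + stats.max_other);
        }
    }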
......@@ -13,7 +13,8 @@ public class ItemPatternMatch {
public void setMatches(double [] matches) {
sub_matches = matches;
}
public void setMatch(int indx, double match_value) {
public void setMatch(int indx, double match_value) { // not used
if (sub_matches == null) {
sub_matches = new double [indx+1];
Arrays.fill(sub_matches, Double.NaN);
......
......@@ -119,239 +119,6 @@ public class ObjectLocation {
}
return dcrop;
}
/**
* Search for maximum within specified radius from the center or specified offset from the center
* @param data square data
* @param cent_xy offset from the square center to search
* @param radius search for maximum within this radius
* @param frac_max fraction of maximum to measure area
* @param other_radius measure maximal disconnected pixel fraction of the maximum within this radius
* @return {maximum, area, dist, x, y}. No interpolation yet, each returned value is integer.
* Returns null if no local maximum within area. x,y are offsets from the (provided) center
*/
public static double [] getMaxLocAreaNaN(
double [] data, // square data
double [] cent_xy, // if null, use center of the square
double radius, // search for maximum within this radius
double frac_max,
double other_radius,
int debugLevel) {
double [] rslt = getMaxLocArea(
data, // square data
cent_xy, // if null, use center of the square
radius, // search for maximum within this radius
frac_max,
other_radius,
debugLevel);
if (rslt != null) {
return rslt;
} else {
return new double[] {Double.NaN,Double.NaN,Double.NaN,Double.NaN,Double.NaN,Double.NaN,Double.NaN};
}
}
public static double [] getMaxLocArea(
double [] data, // square data
double [] cent_xy, // if null, use center of the square
double radius, // search for maximum within this radius
double frac_max,
double other_radius,
int debugLevel) {
boolean debug = debugLevel > 1;
int size = (int) Math.sqrt(data.length);
if (cent_xy == null) {
cent_xy = new double [] {size/2, size/2};
}
double maxr2 = radius * radius;
double best_d = 0;
int best_indx = -1;
int min_x = Math.max(1, (int) Math.floor(cent_xy[0]-radius));
int min_y = Math.max(1, (int) Math.floor(cent_xy[1]-radius));
int max_x = Math.min(size-2, (int) Math.ceil(cent_xy[0]+radius));
int max_y = Math.min(size-2, (int) Math.ceil(cent_xy[1]+radius));
for (int y = min_y; y <= max_y; y++) { // do not search on very edges
double dy = (y-cent_xy[1]);
double y2 = dy*dy;
if (y2 < maxr2) {
for (int x = min_x; x <= max_x; x++) {
double dx = x - cent_xy[0];
double r2 = y2 + dx*dx;
if (r2 < maxr2) {
int indx = y * size + x;
double d = data[indx];
if (d > best_d) {
best_indx = indx;
best_d = d;
}
}
}
}
}
// is it local max?
if ((data[best_indx - 1] > best_d) || (data[best_indx + 1] > best_d) ||
(data[best_indx - size] > best_d) || (data[best_indx + size] > best_d)) {
return null; // on the edge, not a local max
}
boolean [] above_thresh = new boolean [data.length];
double thresh = best_d * frac_max;
for (int i = 0; i < data.length; i++) {
above_thresh[i] = data[i] > thresh;
}
int [] clusters = (new TileNeibs(size,size)).enumerateClusters(
above_thresh, // boolean [] tiles,
null, // int [] num_clusters,
false); // boolean ordered)
int center_cluster = clusters[best_indx];
int best_x = best_indx % size;
int best_y = best_indx / size;
double xc = best_x - cent_xy[0];
double yc = best_y - cent_xy[1];
double s0=0, sx=0, sy=0, sx2 = 0, sy2=0, sxy = 0;
for (int i = 0; i < clusters.length; i++) {
if (clusters[i] == center_cluster) {
double y = i / size - (yc + cent_xy[1]); //(yc + cent_xy[1]) - absolute, from (0,0)
double x = i % size - (xc + cent_xy[0]);
double w = data[i]-thresh;
s0 += w;
sx += w * x;
sy += w * y;
sx2 += w * x * x;
sy2 += w * y * y;
sxy += w * x * y;
}
}
double cxx = sx2 - sx * sx / s0, cyy= sy2 - sy * sy / s0, cxy = sxy - sx * sy / s0;
/*
* sum(Mi*(Xi-avg(X))^2) = SX2 - SX^2/S0
* sum(Mi*(Yi-avg(Y))^2) = SY2 - SY^2/S0
* sum(Mi*(Xi-avg(X)*(Yi-avg(Y))) = SXY - SX*SY / S0
*/
Matrix covar = new Matrix(new double[][] {{cxx, cxy},{cxy,cyy}});
double [] cent_offs = {sx/s0 + xc,sy/s0 + yc};
EigenvalueDecomposition eig = covar.eig();
double [] eigval = {eig.getD().get(0, 0),eig.getD().get(1, 1)};
Arrays.sort(eigval); // ascending
double elong = Math.sqrt(eigval[1]/eigval[0]);
double eff_rad = Math.sqrt(Math.sqrt(eigval[1]*eigval[0]));
double dist = Math.sqrt(cent_offs[0]*cent_offs[0] + cent_offs[1]*cent_offs[1]);
double max_other = 0.0;
if (other_radius > 0) { // only calculate if asked for
double or2 = other_radius*other_radius;
min_x = Math.max(1, (int) Math.floor(best_x-other_radius));
min_y = Math.max(1, (int) Math.floor(best_y-other_radius));
max_x = Math.min(size-2, (int) Math.ceil(best_x+other_radius));
max_y = Math.min(size-2, (int) Math.ceil(best_y+other_radius));
for (int y = min_y; y <= max_y; y++) { // do not search on very edges
double dy = (y-best_y);
double y2 = dy*dy;
if (y2 < or2) {
for (int x = min_x; x <= max_x; x++) {
double dx = x - best_x;
double r2 = y2 + dx*dx;
if (r2 < or2) {
int indx = y * size + x;
if (clusters[indx] != center_cluster) { // only disconnected from the central area
double d = data[indx];
if (d > max_other) {
max_other = d;
}
}
}
}
}
}
max_other /= best_d;
}
if (debug) {
System.out.println("\ncenter offset ["+(sx/s0)+","+(sy/s0)+"] , from center: ["+cent_offs[0]+","+cent_offs[1]+"]");
System.out.println("Covariance matrix:");
covar.print(8, 6);
System.out.println("eig.getV()");
eig.getV().print(8, 6);
System.out.println("eig.getD()");
eig.getD().print(8, 6);
System.out.println(String.format("best_d=%7.5f, rad=%6.3f, elong=%6.3f, dist=%6.3f, dx=%6.3f, dy=%6.3f, mo=%5.3f",
best_d, eff_rad, elong, dist, cent_offs[0], cent_offs[1], max_other));
}
return new double [] {best_d, eff_rad, elong, dist, cent_offs[0], cent_offs[1], max_other} ;
}
public static double [] getMaxLocAreaInt(
double [] data, // square data
double [] cent_xy, // if null, use center of the square
double radius, // search for maximum within this radius
double frac_max) {
int size = (int) Math.sqrt(data.length);
if (cent_xy == null) {
cent_xy = new double [] {size/2, size/2};
}
double maxr2 = radius * radius;
double best_d = 0;
int best_indx = -1;
int min_x = Math.max(1, (int) Math.floor(cent_xy[0]-radius));
int min_y = Math.max(1, (int) Math.floor(cent_xy[1]-radius));
int max_x = Math.min(size-2, (int) Math.ceil(cent_xy[0]+radius));
int max_y = Math.min(size-2, (int) Math.ceil(cent_xy[1]+radius));
for (int y = min_y; y <= max_y; y++) { // do not search on very edges
double dy = (y-cent_xy[1]);
double y2 = dy*dy;
if (y2 < maxr2) {
for (int x = min_x; x <= max_x; x++) {
double dx = x - cent_xy[0];
double r2 = y2 + dx*dx;
if (r2 < maxr2) {
int indx = y * size + x;
double d = data[indx];
if (d > best_d) {
best_indx = indx;
best_d = d;
}
}
}
}
}
// is it local max?
if ((data[best_indx - 1] > best_d) || (data[best_indx + 1] > best_d) ||
(data[best_indx - size] > best_d) || (data[best_indx + size] > best_d)) {
return null; // on the edge, not a local max
}
boolean [] above_thresh = new boolean [data.length];
double thresh = best_d * frac_max;
for (int i = 0; i < data.length; i++) {
above_thresh[i] = data[i] > thresh;
}
int [] clusters = (new TileNeibs(size,size)).enumerateClusters(
above_thresh, // boolean [] tiles,
null, // int [] num_clusters,
false); // boolean ordered)
int center_cluster = clusters[best_indx];
double area_max = 0.0;
for (int i = 0; i < clusters.length; i++) {
if (clusters[i] == center_cluster) {
area_max += 1;
}
}
double x_max = best_indx % size - cent_xy[0];
double y_max = best_indx / size - cent_xy[1];
double dist = Math.sqrt(y_max * y_max + x_max * x_max);
return new double [] {best_d, area_max, dist, x_max, y_max} ;
}
public static double [] getPatternCenter(
double [] data_in,
......@@ -429,6 +196,7 @@ public class ObjectLocation {
corr_pattern, // final double [] pattern, // [psize*psize]
false, // final boolean convolve, // convolve, not correlate
phaseCoeff, // final double phaseCoeff,
0, // final double lpf_sigma, // 0 - do not filter
debugLevel); // final int debugLevel)
if (corr_ret != null) {
System.arraycopy(corr_out, 0, corr_ret, 0, corr_out.length);
......
......@@ -3009,6 +3009,7 @@ public class OrthoMap implements Comparable <OrthoMap>, Serializable{
* @param num_refine number of the plane generation refine passes
* @param frac_above remove this fraction of the highest tiles
* @param frac_below remove this fraction of the lowest tiles
* @param tile_plane_elev null or a double array of the same length as the elevation data; returns, for each elevation sample, its elevation relative to the fitted plane
* @param debug_title Name of the debug image or null to skip
*
* @return plane parameters - all metric, relative to the vertical point:
......@@ -3023,6 +3024,7 @@ public class OrthoMap implements Comparable <OrthoMap>, Serializable{
int num_refine,
double frac_above,
double frac_below,
double [] tile_plane_elev, // null or a full-length array; returns per-sample elevation relative to the fitted plane
String debug_title) {
final int num_bins = 1024;
int diff_zoom_lev = zoom_lev - orig_zoom_level;
......@@ -3062,6 +3064,16 @@ public class OrthoMap implements Comparable <OrthoMap>, Serializable{
width, // final int width,
pix_vertical); // final double [] xy0)
}
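// Optionally report, for each sample of the elevation data, its height above the fitted plane
// (plane_pix = {tilt_x, tilt_y, offset, reference_x, reference_y}, all in pixel units)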
if (tile_plane_elev != null) {
for (int pix = 0; pix < elev.length; pix++) {
int px = pix % width;
int py = pix / width;
double x = px - plane_pix[3];
double y = py - plane_pix[4];
double plane_elev = plane_pix[2]+ x *plane_pix[0] +y * plane_pix[1];
tile_plane_elev[pix] = elev [pix] - plane_elev;
}
}
if (debug_title != null) {
String [] dbg_titles = {"elev", "plane", "diff"};
double [][] dbg_img = new double [dbg_titles.length][elev.length];
......@@ -3082,7 +3094,7 @@ public class OrthoMap implements Comparable <OrthoMap>, Serializable{
debug_title,
dbg_titles);
}
return new double [] {plane_pix[0]*pix_size, plane_pix[1]*pix_size, plane_pix[2]}; // no xy0
return new double [] {plane_pix[0]/pix_size, plane_pix[1]/pix_size, plane_pix[2]}; // no xy0
}
......@@ -3259,7 +3271,7 @@ public class OrthoMap implements Comparable <OrthoMap>, Serializable{
double [] pix_vertical = {vert_meters[0]/pix_size,vert_meters[1]/pix_size}; // image center in pixels
double max_r_diff = metric_error * camera_height / pix_size;
double [] plane_pix =
{plane_metric[0]/pix_size,plane_metric[1]/pix_size,plane_metric[2],pix_vertical[0],pix_vertical[1]};
{plane_metric[0]*pix_size,plane_metric[1]*pix_size,plane_metric[2],pix_vertical[0],pix_vertical[1]};
boolean [] mask = removeHighProjection (
elev, // final double [] data,
null, // final boolean [] mask_in, // will be modified if provided
......@@ -3526,6 +3538,7 @@ public class OrthoMap implements Comparable <OrthoMap>, Serializable{
final double [] pattern, // [psize*psize]
final boolean convolve, // convolve, not correlate
final double phaseCoeff,
final double lpf_sigma, // 0 - do not filter
final int debugLevel) {
final int height = data.length/width;
final double [] dout = new double [data.length];
......@@ -3548,15 +3561,15 @@ public class OrthoMap implements Comparable <OrthoMap>, Serializable{
final DoubleFHT doubleFHT0 = new DoubleFHT();
final double [] patternFD = pattern.clone();
doubleFHT0.transformPattern(patternFD);
final double [] filter = null; // probably not needed
if ((width== psize) && (height == psize)) {
return testPhaseCorr(
return correlateWithPatternSquare(
data, // double [] data,
pattern, // double [] pattern,
convolve, //final boolean convolve, // convolve, not correlate
window, // double [] wnd,
phaseCoeff); // double phaseCoeff)
phaseCoeff, // double phaseCoeff)
lpf_sigma); // double lpf_sigma)
}
final int tilesX = (int) Math.ceil(width/(psize/2)) + 1;
......@@ -3580,7 +3593,11 @@ public class OrthoMap implements Comparable <OrthoMap>, Serializable{
public void run() {
double [] dtile = new double [pattern.length];
TileNeibs tn = new TileNeibs(psize,psize);
DoubleFHT doubleFHT = new DoubleFHT();
DoubleFHT doubleFHT = new DoubleFHT();
double [] filter = doubleFHT.getFrequencyFilter (
dtile, // double [] data, // just to find out size
0, // double highPassSigma,
lpf_sigma); // double lowPassSigma
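// the filter mask depends only on the tile size and sigma, so it is built once per thread
// for reuse by the per-tile correlations in the loop below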
for (int nTile = ai.getAndIncrement(); nTile < ntiles; nTile = ai.getAndIncrement()){
int tileX = tileX0 - 1 + 2 * (nTile % ntilesX); // nTile % ntilesX;
int tileY = tileY0 - 1 + 2 * (nTile / ntilesX); // nTile / ntilesX;
......@@ -3675,12 +3692,13 @@ public class OrthoMap implements Comparable <OrthoMap>, Serializable{
return dout;
}
public static double [] testPhaseCorr(
public static double [] correlateWithPatternSquare(
double [] data_in,
double [] pattern_in,
boolean convolve, // convolve, not correlate
double [] wnd_in,
double phaseCoeff) {
double phaseCoeff,
double lpf_sigma) {
// phaseCoeff = 0.5;
boolean dbg=false;
double [] data = data_in.clone();
......@@ -3703,6 +3721,10 @@ public class OrthoMap implements Comparable <OrthoMap>, Serializable{
rslt_titles);
}
DoubleFHT doubleFHT = new DoubleFHT();
double [] filter = doubleFHT. getFrequencyFilter (
data, // double [] data, // just to find out size
0, // double highPassSigma,
lpf_sigma); // double lowPassSigma
double [] data_orig2 = data.clone();
double [] pattern_orig = pattern.clone();
boolean use_new = true; // false;
......@@ -3712,32 +3734,32 @@ public class OrthoMap implements Comparable <OrthoMap>, Serializable{
doubleFHT.transformPattern(patternFD);
if (convolve) {
corr_out = doubleFHT.convolvePattern (
data, // double [] first,
data, // double [] first,
patternFD, // double [] secondFD,
null, // double [] filter, // high/low pass filtering
filter, // double [] filter, // high/low pass filtering
null); // double [] first_save ) //null-OK
} else {
corr_out = doubleFHT.phaseCorrelatePattern ( // new
data, // double [] first,
data, // double [] first,
patternFD, // double [] secondFD,
phaseCoeff, // double phaseCoeff,
null, // double [] filter, // high/low pass filtering
filter, // double [] filter, // high/low pass filtering
null); // double [] first_save ) //null-OK
}
} else {
if (convolve) {
corr_out = doubleFHT. convolve ( // new
data, // double [] first,
data, // double [] first,
pattern, // double [] second,
null);// double [] filter, // high/low pass filtering
filter); // double [] filter, // high/low pass filtering
} else {
corr_out = doubleFHT. phaseCorrelate ( // new
data, // double [] first,
pattern, // double [] second,
phaseCoeff, // double phaseCoeff,
null, // double [] filter, // high/low pass filtering
null, // double [] first_save,
null); // double [] second_save ) //null-OK
data, // double [] first,
pattern, // double [] second,
phaseCoeff, // double phaseCoeff,
filter, // double [] filter, // high/low pass filtering
null, // double [] first_save,
null); // double [] second_save ) //null-OK
}
}
......@@ -3964,6 +3986,7 @@ public class OrthoMap implements Comparable <OrthoMap>, Serializable{
corr_patterns[n], //final double [] pattern, // [psize*psize]
false, // final boolean convolve, // convolve, not correlate
phaseCoeff, // final double phaseCoeff,
0, // final double lpf_sigma, // 0 - do not filter
debugLevel); // final int debugLevel) {
}
String [] patt_titles = new String[corrs_out.length];
......@@ -4056,6 +4079,7 @@ public class OrthoMap implements Comparable <OrthoMap>, Serializable{
corr_patterns[n], // final double [] pattern, // [psize*psize]
true, // final boolean convolve, // convolve, not correlate
phaseCoeff, // final double phaseCoeff,
0, // final double lpf_sigma, // 0 - do not filter
debugLevel); // final int debugLevel) {
}
double [] convolved_merged = mergePattenConvolutions(
......
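For reference, the lpf_sigma argument threaded through the calls in this commit can also be
exercised directly on a single square tile (illustrative only; the tile size, the flat window and
the sigma value are arbitrary assumptions, the method and its signature are as shown in the diff):

    double [] tile    = new double [64 * 64]; // square tile, fill from the image before calling
    double [] pattern = new double [64 * 64]; // same-size pattern
    double [] window  = new double [64 * 64]; // window applied by the method; flat here for brevity
    java.util.Arrays.fill(window, 1.0);
    double [] corr = OrthoMap.correlateWithPatternSquare(
            tile,    // double [] data_in,
            pattern, // double [] pattern_in,
            false,   // boolean convolve, // correlate, not convolve
            window,  // double [] wnd_in,
            0.5,     // double phaseCoeff,
            2.0);    // double lpf_sigma, // 0 - do not filter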