Commit 477343de authored by Andrey Filippov

Implementing inter-scene matching with normalization by eigenvectors

parent 81d74a44
......@@ -2680,7 +2680,7 @@ public class OrthoMap implements Comparable <OrthoMap>, Serializable{
gd.addNumericField("Video output frame rate", video_fps, 3,7,"fps", "Frame rate of the video.");
gd.addCheckbox ("Compress AVI with JPEG", avi_compress_jpeg, "Use JPEG for AVI compression (false - use raw).");
gd.addNumericField("AVI JPEG quality", aviJpegQuality, 0, 4, "", "AVI JPEG quality if JPEG compression is used.");
gd.addCheckbox ("Convert AVI to WEBM", run_ffmpeg, "Use ffmped to convert intermediate AVI video to WEBM.");
gd.addCheckbox ("Convert AVI to WEBM", run_ffmpeg, "Use ffmpeg to convert intermediate AVI video to WEBM.");
gd.addStringField ("WEBM output extension", video_ext, 5,"WEBM output file extension including dot, normally \".webm\".");
gd.addStringField ("WEBM codec", video_codec, 5,"WEBM codec \"vp8\" or \"vp9\"(vp9 had problems).");
gd.addNumericField("WEBM CRF", video_crf, 0, 4, "", "WEBM compression quality (lower - better, 10 - good).");
......
......@@ -10,6 +10,7 @@ import com.elphel.imagej.common.PolynomialApproximation;
import com.elphel.imagej.common.ShowDoubleFloatArrays;
import com.elphel.imagej.tileprocessor.Corr2dLMA.Sample;
import Jama.EigenvalueDecomposition;
import Jama.Matrix;
/**
......@@ -2506,6 +2507,183 @@ public class Correlation2d {
}
/**
* Find the maximum of a 2D correlation tile as the centroid of the connected area around the
* integer maximum, together with the eigenvectors/eigenvalues of the covariance matrix of that area.
* @param data correlation tile data; will be modified if fpn_mask != null
* @param data_width width of the correlation tile (= 2 * transform_size - 1)
* @param abs_min absolute threshold for samples included in the centroid/covariance
* @param rel_min threshold relative to the maximal value
* @param min_peak minimal value of the maximum; return null if the peak is weaker
* @param eig_sub_frac subtract this fraction of the threshold from the selected samples before
*        calculating the centroid and eigenvectors (0 - select only, result will have a pedestal)
* @param fpn_mask mask of FPN-affected samples (or null)
* @param ignore_border only if fpn_mask != null - ignore the tile if the maximum touches the fpn_mask
* @param debug_data null or double[1][] - if non-null, debug_data[0] gets a copy of data with the
*        not-connected samples replaced by NaN
* @param debug print debug data
* @return {dx, dy, strength, eig_x, eig_y, lambda0, lambda1}, where [eig_x, eig_y] corresponds to lambda0, lambda0 < lambda1.
*/
public static double [] getMaxXYCmEig(
double [] data, // will be modified if fpn_mask != null;
int data_width, // = 2 * transform_size - 1;
double abs_min,
double rel_min,
double min_peak,
double eig_sub_frac, // subtract this fraction of the {abs_min, rel_min} threshold after selecting by it (0 - select only, result will have a pedestal)
boolean [] fpn_mask,
boolean ignore_border, // only if fpn_mask != null - ignore tile if maximum touches fpn_mask
double [][] debug_data, // null or double [1]
boolean debug)
{
int data_height = data.length/data_width;
int center_xy = (data_width - 1)/2; // = transform_size - 1;
double x0 = center_xy, y0 = center_xy;
int imax= 0;
for (int i= 1; i < data.length;i++) {
if (Double.isNaN(data[i])) {
System.out.println("NaN in getMaxXYCmEig()");
return null;
}
if (data[i] > data[imax]) {
imax = i;
}
}
if (data[imax] < min_peak) {
return null; // too weak, even before the FPN filter
}
int ix0 = imax % data_width;
int iy0 = imax / data_width;
x0 = ix0;
y0 = iy0;
if (fpn_mask != null) { // modifies data, returns null if hits fpn
for (int i = 0; i < fpn_mask.length; i++) if (fpn_mask[i]) {
int iy = i / data_width;
int ix = i - iy * data_width;
if (ignore_border) {
if(((ix - ix0) <= 1) && ((ix - ix0) >= -1) && ((iy - iy0) <= 1) && ((iy - iy0) >= -1)) {
return null; // new double[3];
}
}
int ix1 = 2 * ix0 - ix;
if ((ix1 >= 0) && (ix1 < data_width)) {
int iy1 = 2 * iy0 - iy;
if ((iy1 >= 0) && (iy1 < data_height)) {
data[iy1 * data_width + ix1] = 0.0; // zero out symmetrical to fpn mask around integer maximum
}
}
}
// update imax
imax= 0;
for (int i= 1; i < data.length;i++) {
if (data[i] > data[imax]) {
imax = i;
}
}
if (data[imax] < min_peak) {
return null; // too weak after fpn filter
}
}
// create mask of connected to max pixels
double mx = data[imax];
double min_d = Math.min(abs_min, rel_min*mx);
//
double sub_pedestal = min_d * eig_sub_frac;
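// subtracting sub_pedestal from the selected samples reduces the bias that the selection threshold (min_d) would otherwise add to the centroid and covariance estimates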
boolean [] above_threshold = new boolean [data.length];
for (int i = 0; i < data.length; i++) {
above_threshold[i] = data[i] >= min_d;
}
boolean [] en_data = (new TileNeibs(data_width, data_height)).getConnected(
above_threshold, // boolean [] tiles,
ix0, // int seedX,
iy0); // int seedY)
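// en_data keeps only the above-threshold samples that are connected (flood fill) to the integer maximum, so isolated secondary peaks do not contribute to the centroid or covariance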
// find centroid
double s0 = 0, sx=0,sy = 0, sx2 = 0, sy2=0, sxy = 0;
for (int iy = 0; iy < data_height; iy++) {
double y = iy - y0;
for (int ix = 0; ix < data_width; ix++) {
int indx = iy * data_width + ix;
if (en_data[indx]) { // assumes d >0, as it is >= min_d
double x = ix - x0;
double d = data[iy * data_width + ix] - sub_pedestal;
s0 += d;
sx += d * x;
sy += d * y;
sx2 += d * x * x;
sy2 += d * y * y;
sxy += d * x * y;
}
}
}
x0 += sx / s0; // relative to top-left
y0 += sy / s0;
/*
double s0 = 0, sx=0,sy = 0;
for (int iy = 0; iy < data_height; iy++) {
double y = iy - y0;
for (int ix = 0; ix < data_width; ix++) {
int indx = iy * data_width + ix;
if (en_data[indx]) { // assumes d >0, as it is >= min_d
double x = ix - x0;
double d = data[iy * data_width + ix] - sub_pedestal;
s0 += d;
sx += d * x;
sy += d * y;
}
}
}
x0 += sx / s0; // relative to top-left
y0 += sy / s0;
double sx2 = 0, sy2=0, sxy = 0;
for (int iy = 0; iy < data_height; iy++) {
double y = iy - y0;
for (int ix = 0; ix < data_width; ix++) {
int indx = iy * data_width + ix;
if (en_data[indx]) { // assumes d >0, as it is >= min_d
double x = ix - x0;
double d = data[iy * data_width + ix] - sub_pedestal;
sx2 += d * x * x;
sy2 += d * y * y;
sxy += d * x * y;
}
}
}
*/
//https://users.cs.utah.edu/~tch/CS4640/resources/A%20geometric%20interpretation%20of%20the%20covariance%20matrix.pdf
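// cxx, cyy, cxy are the weighted central second moments scaled by the total weight s0; the scaling does not change the eigenvectors, and the eigenvalues remain proportional to the variances along the principal axes of the peak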
double cxx = sx2 - sx * sx / s0, cyy= sy2 - sy * sy / s0, cxy = sxy - sx * sy / s0;
Matrix covar = new Matrix(new double[][] {{cxx, cxy},{cxy,cyy}});
EigenvalueDecomposition eig = covar.eig();
double [] eigval = {eig.getD().get(0, 0),eig.getD().get(1, 1)};
double [][] eigvec = eig.getV().getArray(); // columns of V are the eigenvectors
int eig_indx = (eigval[0] > eigval[1]) ? 1 : 0;
double [] rslt = {
x0 - center_xy,
y0 - center_xy,
mx,
eigvec[0][eig_indx],
eigvec[1][eig_indx],
eigval[eig_indx],
eigval[1-eig_indx]};
if (debug){
System.out.println("getMaxXYCm() -> "+rslt[0]+":"+rslt[1]+" ("+rslt[2]+
"), eigv0=["+rslt[3]+","+rslt[4]+"], lambda0="+rslt[5]+", lambda1="+rslt[6]);
}
if (debug_data != null) {
debug_data[0] = data.clone();
for (int i = 0; i < data.length; i++) {
if (!en_data[i]) {
debug_data[0][i] = Double.NaN;
}
}
}
return rslt;
}
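A minimal caller sketch (not part of this commit) for getMaxXYCmEig(): the helper name demoEigPeak, the synthetic tile, the threshold values and the final normalization step are illustrative assumptions; only the method signature and the layout of the returned array come from the code above.
public static void demoEigPeak() {
    int corr_width = 15;                               // = 2 * transform_size - 1 for transform_size = 8
    double [] data = new double [corr_width * corr_width];
    for (int iy = 0; iy < corr_width; iy++) {          // synthetic elongated peak near the tile center
        for (int ix = 0; ix < corr_width; ix++) {
            double x = ix - 7.6, y = iy - 6.8;
            data[iy * corr_width + ix] = Math.exp(-(x * x / 8.0 + y * y / 2.0));
        }
    }
    double [] peak = Correlation2d.getMaxXYCmEig(
            data,        // correlation tile (may be modified if fpn_mask != null)
            corr_width,  // data_width
            0.1,         // abs_min   (placeholder)
            0.5,         // rel_min   (placeholder)
            0.05,        // min_peak  (placeholder)
            0.0,         // eig_sub_frac: 0 - select only, keep the pedestal
            null,        // fpn_mask
            false,       // ignore_border
            null,        // debug_data
            false);      // debug
    if (peak != null) {
        double dx = peak[0], dy = peak[1];             // centroid offset from the tile center
        double vx = peak[3], vy = peak[4];             // eigenvector of the smaller eigenvalue lambda0
        double l0 = peak[5], l1 = peak[6];             // lambda0 <= lambda1
        // One possible "normalization by eigenvectors": project the offset onto the principal
        // axes and divide by sqrt(lambda) (an illustrative assumption, not necessarily how the
        // inter-scene matching code uses these values).
        double along  = ( dx * vx + dy * vy) / Math.sqrt(l0);
        double across = (-dx * vy + dy * vx) / Math.sqrt(l1);
        System.out.println("offset=("+dx+","+dy+"), normalized=("+along+","+across+")");
    }
}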
/**
* Find maximum of the 2d array projected on a specified vector using centroid.
* On the first stage integer maximum is found, then several refining operations
......
......@@ -8950,11 +8950,18 @@ public class ImageDttCPU {
final float [][] fcorr_data_out = new float[layers][]; // [tilesY*tilesX*tile_size*tile_size];
final Thread[] threads = newThreadArray(threadsMax);
final AtomicInteger ai = new AtomicInteger(0);
/*
for (int layer = 0; layer < layers; layer++) if (fcorr_data[flayer][layer] != null){
fcorr_data_out[layer] = new float[tilesY*tilesX*tile_size*tile_size];
Arrays.fill(fcorr_data_out[layer], Float.NaN);
}
*/
// now some tiles may have null for some layers and non-null for others
for (int layer = 0; layer < layers; layer++){
fcorr_data_out[layer] = new float[tilesY*tilesX*tile_size*tile_size];
Arrays.fill(fcorr_data_out[layer], Float.NaN);
}
final boolean [] used_layers = new boolean[layers];
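// used_layers[layer] is set by the worker threads below; layers that never receive any tile data are set back to null after startAndJoin()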
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
@Override
......@@ -8970,16 +8977,19 @@ public class ImageDttCPU {
tileY = nTile / tilesX;
tileX = nTile % tilesX;
if (fcorr_data[iCorrTile] != null) {
for (int layer = 0; layer < layers; layer++) if (fcorr_data[iCorrTile][layer] != null){ // sparse
for (int i = 0; i < corr_size;i++){
System.arraycopy(
fcorr_data[iCorrTile][layer],
corr_size* i,
fcorr_data_out[layer],
((tileY*tile_size + i) *tilesX + tileX)*tile_size ,
corr_size);
}
if (fcorr_data[iCorrTile] != null) {
for (int layer = 0; layer < layers; layer++) if (fcorr_data[iCorrTile][layer] != null){ // sparse
used_layers[layer] = true;
for (int i = 0; i < corr_size;i++){
if (fcorr_data[iCorrTile][layer] != null) {
System.arraycopy(
fcorr_data[iCorrTile][layer],
corr_size* i,
fcorr_data_out[layer],
((tileY*tile_size + i) *tilesX + tileX)*tile_size ,
corr_size);
}
}
}
}
}
......@@ -8987,7 +8997,14 @@ public class ImageDttCPU {
};
}
startAndJoin(threads);
// remove unused output layers:
for (int layer = 0; layer < layers; layer++){
if (!used_layers[layer]) {
fcorr_data_out[layer]=null;
}
}
return fcorr_data_out;
}
......
......@@ -1900,7 +1900,7 @@ public class QuaternionLma {
Matrix mdelta = jtjl_inv.times(jty);
if (debug_level>2) {
System.out.println("mdelta");
mdelta.print(18, 6);
mdelta.print(18, 10);
}
double scale = 1.0;
......
......@@ -1268,6 +1268,58 @@ public class TileNeibs{
}
return enum_clust_ordered;
}
/**
* Get tiles connected to the seed tile
* @param tiles selected tiles, size should be sizeX * sizeY
* @param seedX horizontal index of the seed tile
* @param seedY vertical index of the seed tile
* @return boolean array of tiles that are connected to the seed tile (null if the seed is
*         outside of the array or tiles.length does not match sizeX * sizeY)
*/
public boolean [] getConnected(
boolean [] tiles,
int seedX,
int seedY)
{
int [] waves = new int [tiles.length];
boolean [] rslt = new boolean [tiles.length];
for (int i = 0; i < tiles.length; i++) waves[i] = tiles[i] ? 0: -1;
ArrayList<Integer> front = new ArrayList<Integer>();
if ((seedX < 0) || (seedY < 0) || (seedX >= sizeX) || (seedY>= sizeY) || (tiles.length != (sizeX*sizeY)) ) {
return null;
}
int start_indx = seedX + seedY * sizeX;
if (!tiles[start_indx]) {
return rslt;
}
Integer ipx = start_indx;
Integer ipx1;
front.clear();
int area = 1;
waves[ipx] = area;
rslt[ipx] = true;
front.add(ipx);
while (!front.isEmpty()) {
ipx = front.remove(0);// get oldest element
for (int d = 0; d < dirs; d++){
ipx1 = getNeibIndex(ipx, d);
if (ipx1 >= 0){
if (waves[ipx1] == 0) {
area++;
waves[ipx1] = area;
rslt[ipx1] = true;
front.add(ipx1);
}
}
}
}
return rslt;
}
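A small usage sketch (not from this commit) for getConnected(); the grid size, the mask contents and the seed position are illustrative.
TileNeibs tn = new TileNeibs(5, 4);                  // sizeX = 5, sizeY = 4
boolean [] mask = new boolean [5 * 4];
mask[1 * 5 + 1] = true;                              // a small connected blob ...
mask[1 * 5 + 2] = true;
mask[2 * 5 + 2] = true;
mask[3 * 5 + 4] = true;                              // ... and an isolated tile at (4,3)
boolean [] connected = tn.getConnected(mask, 1, 1);  // seed at (seedX=1, seedY=1)
// connected[] marks the three blob tiles; the isolated tile stays false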
public static int getMax(
int [] data)
{
......
......@@ -1697,6 +1697,28 @@ public class StructureFromMotion {
final SfmCorr [] sfmCorr = new SfmCorr [accum_PD.length];
final double [][][] data_PD = (neibs_PD == null) ? (new double[][][] {accum_PD}):(new double[][][] {accum_PD,neibs_PD});
boolean show_2d_correlations = debugLevel>10;
if (show_2d_correlations) {
String [] dbg_titles = (neibs_PD == null) ? (new String[] {"accum"}):(new String[] {"accum","neibs"});
double [][] dbg_2d_corrs = ImageDtt.corr_partial_dbg( // not used in lwir
data_PD, // final double [][][] corr_data, // [layer][tile][(2*transform_size-1)*(2*transform_size-1)] // if null - will not calculate
tilesX, // final int tilesX,
corr_size, //final int corr_size, // 15
clt_parameters.corr_border_contrast, // final double border_contrast,
debugLevel); // final int globalDebugLevel)
int tilesY = data_PD[0].length/tilesX;
ShowDoubleFloatArrays.showArrays(
dbg_2d_corrs,
tilesX * (corr_size + 1),
tilesY * (corr_size + 1),
true,
"sfm_corr2d-"+scene_pairs[num_pairs-1][1].getImageName()+"-"+
scene_pairs[0][0].getImageName()+"-"+num_pairs+"_fz"+((int) corr_fz_inter),
dbg_titles);
}
final double [][][] disp_corr = new double [data_PD.length][][];
for (int i = 0; i < data_PD.length; i++) {
disp_corr[i] = getSfmDisparityCorrectionStrength(
......