Commit defa789f authored by Andrey Filippov's avatar Andrey Filippov

Better pixel offset estimation

parent 1f6a9eb9
...@@ -626,9 +626,21 @@ public class CorrVector{ // TODO: Update to non-quad (extract to a file first)? ...@@ -626,9 +626,21 @@ public class CorrVector{ // TODO: Update to non-quad (extract to a file first)?
return toString(false); return toString(false);
} }
/**
 * Estimate the azimuth/tilt angle corresponding to a 1 pixel shift
 * @return Angle in radians that rotates the image by 1 pixel
*/
public double getTiltAzPerPixel() { public double getTiltAzPerPixel() {
return 1.0/1000.0*geometryCorrection.focalLength/geometryCorrection.pixelSize; return 1.0/1000.0*geometryCorrection.focalLength/geometryCorrection.pixelSize;
} }
/**
 * Estimate the roll angle that produces an average image shift of 1 pixel.
 * @return angle in radians for which pixels located at 1/4 of the sensor
 *         width away from the center move by 1 pixel
 */
public double getRollPixel() {
	int sensor_width = geometryCorrection.getSensorWH()[0];
	return 4.0 / sensor_width;
}
public String toString(boolean short_out) { public String toString(boolean short_out) {
String s; String s;
......
...@@ -1280,7 +1280,7 @@ public class ImageDtt extends ImageDttCPU { ...@@ -1280,7 +1280,7 @@ public class ImageDtt extends ImageDttCPU {
gpuQuad.execConvertDirect(use_reference_buffer, wh, erase_clt); // put results into a "reference" buffer gpuQuad.execConvertDirect(use_reference_buffer, wh, erase_clt); // put results into a "reference" buffer
} }
public void setReferenceTDMotionBlur( public void setReferenceTDMotionBlur( // updates tasks
final int erase_clt, final int erase_clt,
final int [] wh, // null (use sensor dimensions) or pair {width, height} in pixels final int [] wh, // null (use sensor dimensions) or pair {width, height} in pixels
final ImageDttParameters imgdtt_params, // Now just extra correlation parameters, later will include, most others final ImageDttParameters imgdtt_params, // Now just extra correlation parameters, later will include, most others
......
...@@ -4803,6 +4803,16 @@ public class OpticalFlow { ...@@ -4803,6 +4803,16 @@ public class OpticalFlow {
// consider copyJP4src for the lower half (now it is not needed) // consider copyJP4src for the lower half (now it is not needed)
} }
force_initial_orientations = true; force_initial_orientations = true;
} else {
try_ref_scene.restoreInterProperties( // restore properties for interscene processing (extrinsics, ers, ...) // get relative poses (98)
null, // String path, // full name with extension or null to use x3d directory
false, // boolean all_properties,// null, // Properties properties, // if null - will only save extrinsics)
debugLevel);
first_last_index = try_ref_scene.getFirstLastIndex(quadCLTs);
if (first_last_index == null) {
// if ((first_last_index == null) ||(first_last_index[0] < 0) || (first_last_index[1] < 0)) {
force_initial_orientations = true;
}
} }
} }
} else { } else {
...@@ -4929,6 +4939,26 @@ public class OpticalFlow { ...@@ -4929,6 +4939,26 @@ public class OpticalFlow {
int center_index =quadCLTs[last_index].getReferenceIndex(null); int center_index =quadCLTs[last_index].getReferenceIndex(null);
if (center_index == -1) { if (center_index == -1) {
force_initial_orientations = true; force_initial_orientations = true;
} else {
QuadCLT try_ref_scene = (QuadCLT) quadCLT_main.spawnNoModelQuadCLT( // will conditionImageSet
quadCLTs[last_index].getReferenceTimestamp(), // set_channels[last_index].set_name,
clt_parameters,
colorProcParameters, //
threadsMax,
debugLevel-2);
if (try_ref_scene == null) {
force_initial_orientations = true;
} else {
try_ref_scene.restoreInterProperties( // restore properties for interscene processing (extrinsics, ers, ...) // get relative poses (98)
null, // String path, // full name with extension or null to use x3d directory
false, // boolean all_properties,// null, // Properties properties, // if null - will only save extrinsics)
debugLevel);
int [] first_last_index = try_ref_scene.getFirstLastIndex(quadCLTs);
if ((first_last_index == null)) { // ||(first_last_index[0] < 0) || (first_last_index[1] < 0)) {
force_initial_orientations = true;
}
}
} }
} else { } else {
......
...@@ -413,7 +413,98 @@ public class QuadCLTCPU { ...@@ -413,7 +413,98 @@ public class QuadCLTCPU {
double z_avg = getGeometryCorrection().getZFromDisparity(disp_avg); double z_avg = getGeometryCorrection().getZFromDisparity(disp_avg);
return z_avg; return z_avg;
} }
/**
 * Estimate the average pixel offset between 2 scenes for avoiding FPN.
 * This scene should have DSI to estimate the average distance.
 * Convenience overload that uses mode == 0 (no sign inversion for either
 * translation axis).
 * @param xyzatr0 first scene pose [[x,y,z],[azimuth,tilt,roll]]
 * @param xyzatr1 second scene pose [[x,y,z],[azimuth,tilt,roll]]
 * @param use_lma passed through to the distance estimation (presumably
 *        selects LMA-measured DSI tiles — confirm with getAverageZ())
 * @return estimated pixel shift, NaN if the distance could not be estimated
 */
public double estimatePixelShift(
		double [][] xyzatr0,
		double [][] xyzatr1,
		boolean use_lma
		) {
	final int default_mode = 0; // keep original signs of the x/y translation terms
	return estimatePixelShift(xyzatr0, xyzatr1, use_lma, default_mode);
}
/**
 * Estimate the average pixel offset between 2 scenes for avoiding FPN.
 * This scene should have DSI to estimate the average distance.
 * @param xyzatr0 first scene pose [[x,y,z],[azimuth,tilt,roll]]
 * @param xyzatr1 second scene pose [[x,y,z],[azimuth,tilt,roll]]
 * @param use_lma passed through to the distance estimation (presumably
 *        selects LMA-measured DSI tiles — confirm with getAverageZ())
 * @param mode bit 0 - invert the sign of the x-translation term,
 *             bit 1 - invert the sign of the y-translation term
 * @return estimated pixel shift, NaN if the distance could not be estimated
 */
public double estimatePixelShift(
		double [][] xyzatr0,
		double [][] xyzatr1,
		boolean use_lma,
		int mode
		) {
	final boolean invert_x = (mode & 1) != 0;
	final boolean invert_y = (mode & 2) != 0;
	final double z = getAverageZ(use_lma);
	if (Double.isNaN(z)) {
		System.out.println("estimatePixelShift(): failed estimating distance - no DSI or too few DSI tiles.");
		return Double.NaN;
	}
	// sensitivities: pixels of image shift per radian of rotation
	final double aztl_pix_per_rad = 1.0 / getErsCorrection().getCorrVector().getTiltAzPerPixel();
	final double roll_pix_per_rad = 1.0 / getErsCorrection().getCorrVector().getRollPixel();
	// pose difference: row 0 - translation (x,y,z), row 1 - rotation (az,tl,roll)
	final double [][] dxyzatr = new double [2][3];
	for (int j = 0; j < 3; j++) {
		dxyzatr[0][j] = xyzatr1[0][j] - xyzatr0[0][j];
		dxyzatr[1][j] = xyzatr1[1][j] - xyzatr0[1][j];
	}
	// rotation plus apparent rotation induced by translation at distance z
	final double eff_az = dxyzatr[1][0] + (invert_x ? -1 : 1) * dxyzatr[0][0] / z;
	final double eff_tl = dxyzatr[1][1] + (invert_y ? -1 : 1) * dxyzatr[0][1] / z;
	final double pix_x    = -eff_az * aztl_pix_per_rad;
	final double pix_y    =  eff_tl * aztl_pix_per_rad;
	final double pix_roll = dxyzatr[1][2] * roll_pix_per_rad;
	// z-translation term evaluated at 1/4 of the sensor width from center
	final double pix_zoom = (0.25 * getErsCorrection().getSensorWH()[0]) * dxyzatr[0][2] / z;
	return Math.sqrt(pix_x * pix_x + pix_y * pix_y + pix_roll * pix_roll + pix_zoom * pix_zoom);
}
/**
 * Estimate the RMS pixel offset between two scene poses at a given average
 * scene distance by re-projecting probe point(s) from one pose to the other.
 * @param xyzatr0 first scene pose [[x,y,z],[azimuth,tilt,roll]]
 * @param xyzatr1 second scene pose [[x,y,z],[azimuth,tilt,roll]]
 * @param average_z average scene distance, converted to disparity internally
 * @param use_rot when true use four off-center probe points (so roll and
 *        zoom contribute to the result) instead of a single center point
 * @param rectilinear when true treat both views as rectilinear (undistorted)
 * @return RMS of the per-point pixel offsets
 */
public double estimateAverageShift(
		double [][] xyzatr0,
		double [][] xyzatr1,
		double average_z,
		boolean use_rot,
		boolean rectilinear
		) {
	final ErsCorrection ers = getErsCorrection();
	final double disp_avg = ers.getDisparityFromZ(average_z);
	final int [] wh = ers.getSensorWH();
	final double [][] probes;
	if (use_rot) {
		// four points at the quarter positions of the frame
		probes = new double [][] {
			{0.25 * wh[0], 0.25 * wh[1]},
			{0.75 * wh[0], 0.25 * wh[1]},
			{0.25 * wh[0], 0.75 * wh[1]},
			{0.75 * wh[0], 0.75 * wh[1]}};
	} else {
		// single point at the image center
		probes = new double [][] {{0.5 * wh[0], 0.5 * wh[1]}};
	}
	double sum_sq = 0.0;
	for (double [] probe : probes) {
		double [] pXpYD = ers.getImageCoordinatesERS(
				null,                  // QuadCLT cameraQuadCLT, // camera station that got image to be matched
				probe[0],              // double px, // pixel coordinate X in the reference view
				probe[1],              // double py, // pixel coordinate Y in the reference view
				disp_avg,              // double disparity, // this reference disparity
				!rectilinear,          // boolean distortedView, // this camera view is distorted, false - rectilinear
				xyzatr0[0],            // double [] reference_xyz, // this view position in world coordinates
				xyzatr0[1],            // double [] reference_atr, // this view orientation relative to world frame
				!rectilinear,          // boolean distortedCamera, // camera view is distorted (false - rectilinear)
				xyzatr1[0],            // double [] camera_xyz, // camera center in world coordinates
				xyzatr1[1],            // double [] camera_atr, // camera orientation relative to world frame
				OpticalFlow.LINE_ERR); // double line_err // threshold error in scan lines (1.0)
		double dx = pXpYD[0] - probe[0];
		double dy = pXpYD[1] - probe[1];
		sum_sq += dx * dx + dy * dy;
	}
	return Math.sqrt(sum_sq / probes.length);
}
public double [][] getGround( public double [][] getGround(
CLTParameters clt_parameters, CLTParameters clt_parameters,
boolean use_lma, boolean use_lma,
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment