Commit fadcab9e authored by Andrey Filippov's avatar Andrey Filippov

Working on radar

parent 692610be
......@@ -11,6 +11,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicInteger;
......@@ -28,8 +29,11 @@ import com.elphel.imagej.gpu.GpuQuad;
import com.elphel.imagej.gpu.TpTask;
import com.elphel.imagej.ims.UasLogReader;
import com.elphel.imagej.tileprocessor.Correlation2d;
import com.elphel.imagej.tileprocessor.ErsCorrection;
import com.elphel.imagej.tileprocessor.GeometryCorrection;
import com.elphel.imagej.tileprocessor.ImageDtt;
import com.elphel.imagej.tileprocessor.IntersceneMatchParameters;
import com.elphel.imagej.tileprocessor.OpticalFlow;
import com.elphel.imagej.tileprocessor.QuadCLT;
import com.elphel.imagej.tileprocessor.TDCorrTile;
import com.elphel.imagej.tileprocessor.TileNeibs;
......@@ -86,12 +90,6 @@ public class CuasMotion {
final static public int TOTALS_BAD = 3;
final static public int TOTALS_LENGTH = TOTALS_BAD+1;
/*
final double [] importance, // for now (each - squared?): [0] - Amplitude (A/A0), 1 - RMS (RMS0/RMS), 2 - RRMS((RMS/A0) / (RMS/A)
*/
public static String RESOURCE_GRAPICS_DIR ="graphics";
public static String ICON_TARGET_1X = "TDbox_dashed21x21_bold.png";
public static String ICON_TARGET_2X = "TDbox_dashed43x43_2px.png";
......@@ -103,6 +101,15 @@ public class CuasMotion {
};
public static String ICON_BLUE = "Circle63x63blue.png";
public static final int ANNOT_ID = 0; // target ID (global index)
public static final int ANNOT_RANGE = 1; // range
public static final int ANNOT_TRANG = 2; // true range
public static final int ANNOT_AGL = 3; // altitude AGL
public static final int ANNOT_AZ = 4; // azimuth
public static final int ANNOT_VS = 5; // vertical speed
public static final int ANNOT_GS = 6; // ground speed
public static final int ANNOT_HDG = 7; // heading
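// annot_mode (see getAnnotationText() below) is a bit mask over the ANNOT_* indices above;
// -1 enables every field. A hypothetical example (not used anywhere in this class):
// int annot_mode = (1 << ANNOT_ID) | (1 << ANNOT_RANGE) | (1 << ANNOT_AZ); // ID, range and azimuth only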
private final GPUTileProcessor gpuTileProcessor;
private CLTParameters clt_parameters=null;
private QuadCLT parentCLT = null;
......@@ -2018,7 +2025,7 @@ public class CuasMotion {
double [][][] target_sequence = new double [num_seq][num_tiles][];
for (int nseq=0; nseq < num_seq; nseq++) {
for (int ntile = 0; ntile < num_tiles; ntile++) if (!Double.isNaN(targets_file[CuasMotionLMA.RSLT_X][nseq][ntile]) ||
!Double.isNaN(targets_file[CuasMotionLMA.RSLT_FL_PX][nseq][ntile])){
((num_fields > CuasMotionLMA.RSLT_FL_PX) && !Double.isNaN(targets_file[CuasMotionLMA.RSLT_FL_PX][nseq][ntile]))){
double [] v = new double[num_fields_augmented]; // cut to originally calculated fields
for (int i = 0; i < num_fields; i++) {
v[i] = targets_file[i][nseq][ntile];
......@@ -3889,7 +3896,7 @@ public class CuasMotion {
return iclt_fimg[0][0].clone();
}
public static ImagePlus convertToRgbAnnotateTargets(
public ImagePlus convertToRgbAnnotateTargets(
CLTParameters clt_parameters,
final double input_range, // 5
final boolean scale2x,
......@@ -3914,6 +3921,8 @@ public class CuasMotion {
final String [] titles,
final UasLogReader uasLogReader,
final int debugLevel) {
// parentCLT
final GeometryCorrection gc = parentCLT.getGeometryCorrection();
String uas_log_path = null;
boolean annotate_uas = show_uas && (uasLogReader != null);
if (annotate_uas) {
......@@ -3987,9 +3996,6 @@ public class CuasMotion {
detected_rgba[i][s] = (detected_pixels[i] >> (8 * s)) & 0xff;
}
}
final int scale = scale2x? 2 : 1;
final Thread[] threads = ImageDtt.newThreadArray(QuadCLT.THREADS_MAX);
final AtomicInteger ai = new AtomicInteger(0);
......@@ -4255,6 +4261,7 @@ public class CuasMotion {
ImageProcessor ip = fstack_scenes.getProcessor(nscene+1);
String txt = getTargetText(
clt_parameters, // CLTParameters clt_parameters,
gc, //GeometryCorrection gc,
targets[ntarget]); // double [] target);
if (show_disp) {
double disparity = targets[ntarget][TARGET_DISPARITY];
......@@ -4312,6 +4319,685 @@ public class CuasMotion {
return imp;
}
public static ImagePlus generateRadarImage(
CLTParameters clt_parameters,
int annot_mode, // specify bits
QuadCLT scene,
double [][][] targets,
UasLogReader uasLogReader, // contains camera orientation (getCameraATR())
String title,
String [] key_titles, // corresponding to top targets dimension
ImagePlus imp_lwir, // all titles, one per frame
int debugLevel) {
ImageStack stack_lwir = imp_lwir.getStack();
String [] scene_titles = new String[stack_lwir.getSize()];
for (int i = 0; i < stack_lwir.getSize(); i++) {
scene_titles[i] = stack_lwir.getSliceLabel(i+1);
}
return generateRadarImage(
clt_parameters,
annot_mode, // int annot_mode, // specify bits
scene, // QuadCLT scene,
targets,
uasLogReader, // contains camera orientation (getCameraATR())
title,
key_titles, // corresponding to top targets dimension
scene_titles, // all titles, one per frame
debugLevel);
}
public static ImagePlus generateRadarImage(
CLTParameters clt_parameters,
int annot_mode, // specify bits
QuadCLT scene,
double [][][] targets,
UasLogReader uasLogReader, // contains camera orientation (getCameraATR())
String title,
String [] key_titles, // corresponding to top targets dimension
String [] scene_titles, // all titles, one per frame
int debugLevel) {
final int num_seq = key_titles.length;
final int num_tiles = targets[0].length;
final int sensor_width = scene.getGeometryCorrection().getSensorWH()[0];
final int tileSize = GPUTileProcessor.DTT_SIZE;
final int tilesX = sensor_width/tileSize;
final double ifov = scene.getGeometryCorrection().getIFOV();
final double radar_range = clt_parameters.imp.cuas_radar_range;
final String font_name = clt_parameters.imp.cuas_font_name;
final int font_size = clt_parameters.imp.cuas_font_size;
final int font_type = clt_parameters.imp.cuas_font_type;
final int image_scale = 2; // here always 2
final int space_before_text = 2 * image_scale;
final int width = 540; // calculate
final int height = 1024; // calculate
final int radar_height = 970; // calculate
// move to configs:
final int bottom_gap = 10;
final int infinity_gap = 10; // add to radar_height
final double ring_step = 100.0;
final double dir_step = 5.0; // degrees
final int grid_font_size = 7;
final int grid_azimuth_top= 5; // where to put azimuth
final int grid_margin= 5; // grid annotation from left/right
final Color rings_color = new Color(140,140,140);
final Color uas_color = new Color( 0,100,140);
final Color target_color = new Color( 0,255,100);
final double uas_radius = 4.0;
final double target_radius = 1.5;
final boolean annotate_grid = true;
final Font font_grid = annotate_grid ? (new Font(font_name, font_type, image_scale * grid_font_size)): null;
final int [] camera_xy0 = new int[] {width/2,height-bottom_gap};
int num_scenes = scene_titles.length;
final ColorProcessor [] color_processors = new ColorProcessor[num_scenes];
final int [][] pixels = new int[num_scenes][width*height];
final ImageStack stack = new ImageStack (width, height);
for (int nscene = 0; nscene < num_scenes; nscene++) {
color_processors[nscene] = new ColorProcessor(width, height, pixels[nscene]);
stack.addSlice(scene_titles[nscene], color_processors[nscene]);
}
double camera_az = uasLogReader.getCameraATR()[0];
createGrid(
color_processors[0], // ColorProcessor colorProcessor,
scene, // QuadCLT scene,
camera_xy0, // int [] camera_xy0,
radar_height, // int height,
camera_az, // double camera_az,
rings_color, // Color color_grid,
font_grid, // Font font, // if null = do not annotate
radar_range, // double max_z, // corresponds to height
grid_azimuth_top, // int grid_azimuth_top, // = 5; // where to put azimuth
grid_margin, // int grid_margin, // = 5; // grid annotation from left/right
ring_step, // double ring_step, // = 100.0;
dir_step); // double dir_step) // = 5.0; // degrees
for (int nscene = 1; nscene < num_scenes; nscene++) {
System.arraycopy(pixels[0], 0, pixels[nscene], 0, pixels[nscene].length);
}
ImagePlus imp_new = new ImagePlus(title,stack);
final Thread[] threads = ImageDtt.newThreadArray();
final AtomicInteger ai = new AtomicInteger(0);
final AtomicInteger amax = new AtomicInteger(-1);
final int [] uas_tiles = new int[num_seq];
// Arrays.fill(uas_tiles, -1);
final int [][] local_tiles = new int [num_seq][];
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nSeq = ai.getAndIncrement(); nSeq < targets.length; nSeq = ai.getAndIncrement()) {
double [][] targets_seq = targets[nSeq];
/// key - local target_id > 0
/// Key 0 - special case: tiles where UAS log data is stored (may be the same as or different from the detected tiles)
/// value - tile index
HashMap<Integer,Integer> local_tiles_map = new HashMap<Integer,Integer>(); //
int max_ltarg = -1;
for (int ntile = 0; ntile < num_tiles; ntile++) {
double [] target = targets_seq[ntile];
if (target != null) {
if (!Double.isNaN(target[CuasMotionLMA.RSLT_FL_PX])) {
local_tiles_map.put(0, ntile); // flight log data
}
if (!Double.isNaN(target[CuasMotionLMA.RSLT_GLOBAL])) {
int ltarg = (int) target[CuasMotionLMA.RSLT_GLOBAL];
max_ltarg = Math.max(max_ltarg, ltarg);
local_tiles_map.put(ltarg, ntile); // target data
}
}
}
amax.getAndAccumulate(max_ltarg, Math::max);
local_tiles[nSeq] = new int[max_ltarg+1];
Arrays.fill(local_tiles[nSeq],-1);
for (int i = 0; i <= max_ltarg; i++) if (local_tiles_map.containsKey(i)) {
local_tiles[nSeq][i] = local_tiles_map.get(i);
}
}
}
};
}
ImageDtt.startAndJoin(threads);
final int [][] ltargets_first_last = new int [amax.get()][]; // correct
for (int nseq = 0; nseq < num_seq; nseq++) {
uas_tiles[nseq] = local_tiles[nseq][0];
}
for (int ltarg = 1; ltarg <= ltargets_first_last.length; ltarg++) { // 0 - for UAS log
int indx_first, indx_last;
for (indx_first = 0; (indx_first < num_seq) && ((local_tiles[indx_first].length <= ltarg ) || (local_tiles[indx_first][ltarg] < 0)); indx_first++);
if (indx_first < num_seq) {
for (indx_last = num_seq - 1; (indx_last >= 0) && ((local_tiles[indx_last].length <= ltarg ) || (local_tiles[indx_last][ltarg] < 0)); indx_last--); // Index 5 out of bounds for length 5
ltargets_first_last[ltarg-1] = new int[] {indx_first,indx_last};
}
}
final int [] target_ids = new int [ltargets_first_last.length];
for (int ltarg = 1; ltarg <= target_ids.length; ltarg++) if (ltargets_first_last[ltarg-1] != null) {
int nseq = ltargets_first_last[ltarg-1][0];
int ntile = local_tiles[nseq][ltarg]; // 1-based, 0 - uas log
double [] target = targets[nseq][ntile];
target_ids[ltarg-1] = (int) target[CuasMotionLMA.RSLT_TARGET_ID];
}
Integer [] targets_order_integer = new Integer [target_ids.length];
for (int i = 0; i < targets_order_integer.length; i++) {
targets_order_integer[i] = i;
}
Arrays.sort(targets_order_integer, new Comparator<Integer>() { //
@Override
public int compare(Integer lhs, Integer rhs) {
return Integer.compare(target_ids[lhs], target_ids[rhs]);
}
});
// 0-based indices, 0-based values.
int [] targets_order = Arrays.stream(targets_order_integer).mapToInt(Integer::intValue).toArray();
// mark UAS log and targets on the radar
ai.set(0);
final ErsCorrection ersCorrection = scene.getErsCorrection();
int [] scene_indices = getSceneIndices(
key_titles, // String [] key_titles, // corresponding to top targets dimension
scene_titles); // String [] scene_titles) { // all titles, one per frame
final double kpix = radar_height/radar_range;
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
int [] conflict_pix = new int [width * height];
ColorProcessor conflict_cp = new ColorProcessor(width, height, conflict_pix);
for (int nSeq = ai.getAndIncrement(); nSeq < (targets.length - 1); nSeq = ai.getAndIncrement()) {
Arrays.fill(conflict_pix, 0);
// for now only for targets longer than 1 series
int nscene0 = scene_indices[nSeq];
int nscene1 = scene_indices[nSeq+1];
double kscene = 1.0/(nscene1-nscene0);
// plot UAS
if ((uas_tiles[nSeq] >= 0) && (uas_tiles[nSeq+1] >= 0)) {
double [] uas_target0= targets[nSeq][uas_tiles[nSeq]];
double [] uas_target1= targets[nSeq+1][uas_tiles[nSeq+1]];
for (int nscene = nscene0; nscene <= nscene1; nscene++) {
double k = (nscene - nscene0) * kscene; // 0 when nscene=nscene0
double px = interpolate(uas_target0[CuasMotionLMA.RSLT_FL_PX], uas_target1[CuasMotionLMA.RSLT_FL_PX],k);
double py = interpolate(uas_target0[CuasMotionLMA.RSLT_FL_PY], uas_target1[CuasMotionLMA.RSLT_FL_PY],k);
double disparity = interpolate(uas_target0[CuasMotionLMA.RSLT_FL_DISP],uas_target1[CuasMotionLMA.RSLT_FL_DISP],k);
// needed relative to the camera
// {right, up, to_camera}
double [] xyz = ersCorrection.getWorldCoordinatesERS( // ersCorrection - reference
px, // double px, // pixel coordinate X in the reference view
py, // double py, // pixel coordinate Y in the reference view
disparity, // double disparity, // reference disparity
true, // boolean distortedView, // This camera view is distorted (diff.rect), false - rectilinear
OpticalFlow.ZERO3, // double [] reference_xyz, // this view position in world coordinates (typically ZERO3)
OpticalFlow.ZERO3); // double [] reference_atr, // this view orientation relative to world frame (typically ZERO3)
double x = camera_xy0[0] + xyz[0] * kpix;
double y = camera_xy0[1] + xyz[2] * kpix; // positive Z - down in the image (verify)
drawCircle(
color_processors[nscene], // ColorProcessor colorProcessor,
uas_color, // Color color, // or null
x, // double xc,
y, // double yc,
uas_radius); // double radius)
drawCircle(
conflict_cp, // ColorProcessor colorProcessor,
uas_color, // Color color, // or null
x, // double xc,
y, // double yc,
uas_radius); // double radius)
}
}
// plot targets
int icon_width = (int) Math.round(2 * target_radius); // placeholder: use the actual icon width when icons are drawn
int [][][] target_coord = new int [local_tiles[nSeq].length-1][][]; // save centers of targets and text coordinates
Rectangle [][] annot_boxes = new Rectangle[local_tiles[nSeq].length-1][];
String [][] annots = new String [local_tiles[nSeq].length-1][];
// add integer color and bg_color (-1 - transparent)
for (int iltarget = 0; iltarget < targets_order.length; iltarget ++) {
int ltarget = targets_order[iltarget] + 1;
// change
// for (int ltarget = 1; ltarget < local_tiles[nSeq].length; ltarget++) {
if ( (local_tiles[nSeq].length > ltarget) &&
(local_tiles[nSeq+1].length > ltarget) &&
(local_tiles[nSeq][ltarget] >=0) &&
(local_tiles[nSeq+1][ltarget]>=0)) {
int ntile0 = local_tiles[nSeq ][ltarget];
int ntile1 = local_tiles[nSeq+1][ltarget];
double [] target0= targets[nSeq ][ntile0];
double [] target1= targets[nSeq+1][ntile1];
double px0 = (ntile0 % tilesX +0.5) * tileSize + target0[CuasMotionLMA.RSLT_X]; // ignoring velocities
double py0 = (ntile0 / tilesX +0.5) * tileSize + target0[CuasMotionLMA.RSLT_Y];
double range0= target0[CuasMotionLMA.RSLT_RANGE_LIN];
double px1 = (ntile1 % tilesX +0.5) * tileSize + target1[CuasMotionLMA.RSLT_X];
double py1 = (ntile1 / tilesX +0.5) * tileSize + target1[CuasMotionLMA.RSLT_Y];
double range1= target1[CuasMotionLMA.RSLT_RANGE_LIN];
double disparity0 = Double.NaN;
double disparity1 = Double.NaN;
double radar_x0, radar_y0, radar_x1, radar_y1;
if ((range0 <= radar_range) && (range1 <= radar_range)) {
disparity0 = ersCorrection.getDisparityFromZ(range0);
disparity1 = ersCorrection.getDisparityFromZ(range1);
double [] xyz0 = ersCorrection.getWorldCoordinatesERS( // ersCorrection - reference
px0, // double px, // pixel coordinate X in the reference view
py0, // double py, // pixel coordinate Y in the reference view
disparity0, // double disparity, // reference disparity
true, // boolean distortedView, // This camera view is distorted (diff.rect), false - rectilinear
OpticalFlow.ZERO3, // double [] reference_xyz, // this view position in world coordinates (typically ZERO3)
OpticalFlow.ZERO3); // double [] reference_atr, // this view orientation relative to world frame (typically ZERO3)
double [] xyz1 = ersCorrection.getWorldCoordinatesERS( // ersCorrection - reference
px1, // double px, // pixel coordinate X in the reference view
py1, // double py, // pixel coordinate Y in the reference view
disparity1, // double disparity, // reference disparity
true, // boolean distortedView, // This camera view is distorted (diff.rect), false - rectilinear
OpticalFlow.ZERO3, // double [] reference_xyz, // this view position in world coordinates (typically ZERO3)
OpticalFlow.ZERO3); // double [] reference_atr, // this view orientation relative to world frame (typically ZERO3)
radar_x0 = camera_xy0[0] + xyz0[0] * kpix;
radar_y0 = camera_xy0[1] + xyz0[2] * kpix; // positive Z - down in the image (verify)
radar_x1 = camera_xy0[0] + xyz1[0] * kpix;
radar_y1 = camera_xy0[1] + xyz1[2] * kpix; // positive Z - down in the image (verify)
} else {
radar_x0 = camera_xy0[0] + (px0 - sensor_width/2) * ifov * (radar_height + infinity_gap);
radar_y0 = camera_xy0[1] - (radar_height + infinity_gap);
radar_x1 = camera_xy0[0] + (px1 - sensor_width/2) * ifov * (radar_height + infinity_gap);
radar_y1 = camera_xy0[1] - (radar_height + infinity_gap);
}
target_coord[ltarget-1] = new int [nscene1-nscene0+1][2];
for (int nscene = nscene0; nscene <= nscene1; nscene++) {
double k = (nscene - nscene0) * kscene; // 0 when nscene=nscene0
double radar_x = interpolate(radar_x0, radar_x1, k);
double radar_y = interpolate(radar_y0, radar_y1, k);
target_coord[ltarget-1][nscene-nscene0][0] = (int) Math.round(radar_x);
target_coord[ltarget-1][nscene-nscene0][1] = (int) Math.round(radar_y);
drawCircle(
color_processors[nscene], // ColorProcessor colorProcessor,
target_color, // Color color, // or null
radar_x, // double xc,
radar_y, // double yc,
target_radius); // double radius)
drawCircle(
conflict_cp, // ColorProcessor colorProcessor,
target_color, // Color color, // or null
radar_x, // double xc,
radar_y, // double yc,
target_radius); // double radius)
}
}
}
}
}
};
}
ImageDtt.startAndJoin(threads);
return imp_new;
}
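// A minimal sketch (the helper name is new here and is not used by the code above) of the
// world-to-radar mapping applied in the threads above, assuming xyz = {right, up, to_camera}
// in meters as returned by getWorldCoordinatesERS() and kpix = radar_height / radar_range
// pixels per meter, with the camera point at camera_xy0 (bottom center of the radar panel).
private static double [] worldToRadarSketch(
double [] xyz,
int [] camera_xy0,
double kpix) {
double radar_x = camera_xy0[0] + xyz[0] * kpix; // world "right" maps to the right on the canvas
double radar_y = camera_xy0[1] + xyz[2] * kpix; // world Z maps down from the camera point (see "verify" notes above)
return new double [] {radar_x, radar_y};
}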
public static String getAnnotationText(
CLTParameters clt_parameters,
int annot_mode, // specify bits
int ntile0,
int ntile1,
double [] target0,
double [] target1,
double k,
double [] camera_atr,
ErsCorrection ersCorrection) {
boolean show_inf = clt_parameters.imp.cuas_show_inf; // true; // Show distance greater than max (or negative) as infinity
boolean show_inf_gt = clt_parameters.imp.cuas_show_inf_gt; // false; // Use ">max" instead of infinity symbol
double max_annot_range = clt_parameters.imp.cuas_rng_limit; // 5000, maybe make a separate parameter
double max_axial_range = clt_parameters.imp.cuas_radar_range; // may be a separate - maximal range for axial velocity/heading
boolean reserve_missing_fields = true; // make a parameter. Reserve a line for requested but missing parameters
double ifov = ersCorrection.getIFOV();
int sensor_width = ersCorrection.getSensorWH()[0];
int tileSize = GPUTileProcessor.DTT_SIZE;
int tilesX = sensor_width/tileSize;
int id = (int) target0[CuasMotionLMA.RSLT_TARGET_ID];
double px0 = (ntile0 % tilesX +0.5) * tileSize + target0[CuasMotionLMA.RSLT_X]; // ignoring velocities
double py0 = (ntile0 / tilesX +0.5) * tileSize + target0[CuasMotionLMA.RSLT_Y];
double range0= target0[CuasMotionLMA.RSLT_RANGE_LIN];
double px1 = (ntile1 % tilesX +0.5) * tileSize + target1[CuasMotionLMA.RSLT_X];
double py1 = (ntile1 / tilesX +0.5) * tileSize + target1[CuasMotionLMA.RSLT_Y];
double range1= target1[CuasMotionLMA.RSLT_RANGE_LIN];
double px = interpolate (px0, px1, k);
double py = interpolate (py0, py1, k);
double range = interpolate (range0, range1, k);
// range may be Double.NaN if missing
if (range > max_annot_range) {
range = Double.POSITIVE_INFINITY;
}
double true_range = Double.isNaN (target0[CuasMotionLMA.RSLT_FL_RANGE]) ? Double.NaN:
interpolate (target0[CuasMotionLMA.RSLT_FL_RANGE],target1[CuasMotionLMA.RSLT_FL_RANGE],k);
double disparity = 0;
if (Double.isFinite(range)) {
disparity = ersCorrection.getDisparityFromZ(range);
}
double [] xyz = null, wxyz = null;
double agl = Double.NaN;
double az = Double.NaN;
if (disparity > 0) {
xyz = ersCorrection.getWorldCoordinatesERS( // ersCorrection - reference
px, // double px, // pixel coordinate X in the reference view
py, // double py, // pixel coordinate Y in the reference view
disparity, // double disparity, // reference disparity
true, // boolean distortedView, // This camera view is distorted (diff.rect), false - rectilinear
OpticalFlow.ZERO3, // double [] reference_xyz, // this view position in world coordinates (typically ZERO3)
OpticalFlow.ZERO3); // double [] reference_atr, // this view orientation relative to world frame (typically ZERO3)
wxyz = ersCorrection.getWorldCoordinatesERS( // ersCorrection - reference
px, // double px, // pixel coordinate X in the reference view
py, // double py, // pixel coordinate Y in the reference view
disparity, // double disparity, // reference disparity
true, // boolean distortedView, // This camera view is distorted (diff.rect), false - rectilinear
OpticalFlow.ZERO3, // double [] reference_xyz, // this view position in world coordinates (typically ZERO3)
camera_atr); // double [] reference_atr, // this view orientation relative to world frame (typically ZERO3)
agl = wxyz[1];
az = Math.atan2(wxyz[2], -wxyz[0]); // TODO: check sign
} else {
az = (px - sensor_width/2) * ifov; // + camera_atr[0];
}
while (az <0 ) az += 2*Math.PI;
double az_deg = az * 180/Math.PI;
double vel_away = interpolate (target0[CuasMotionLMA.RSLT_VEL_AWAY], target1[CuasMotionLMA.RSLT_VEL_AWAY], k);
if (range > max_axial_range) {
vel_away = 0;
}
double vel_right = interpolate (target0[CuasMotionLMA.RSLT_VEL_RIGHT],target1[CuasMotionLMA.RSLT_VEL_RIGHT],k);
double vel_up = interpolate (target0[CuasMotionLMA.RSLT_VEL_UP], target1[CuasMotionLMA.RSLT_VEL_UP], k);
double vel_hor = Math.sqrt(vel_away*vel_away + vel_right*vel_right);
if (range > max_axial_range) {
vel_hor *= Math.sqrt(2); // estimate the full horizontal speed by sqrt(2) when the axial component is unknown
}
double hdg = Math.atan2(-vel_right, vel_away);
while (hdg <0 ) hdg += 2*Math.PI;
double hdg_deg = hdg * 180/ Math.PI;
StringBuffer sb = new StringBuffer();
if ((annot_mode & (1 << ANNOT_ID)) != 0){
sb.append(String.format(" ID %03d\n", id));
}
if ((annot_mode & (1 << ANNOT_RANGE)) != 0){
if (range <= max_annot_range) { // handles POSITIVE_INFINITY
sb.append(String.format("RNG %4.0f\n", range));
} else if (show_inf && !Double.isNaN(range)){
if (show_inf_gt) {
sb.append(String.format("RNG>%4.0f\n", max_annot_range));
} else {
sb.append("RNG \u221E\n");
}
} else if (reserve_missing_fields){
sb.append("\n");
}
}
if ((annot_mode & (1 << ANNOT_TRANG)) != 0){
if (!Double.isNaN(true_range)) {
sb.append(String.format("TRNG%4.0f\n", range));
} else if (reserve_missing_fields){
sb.append("\n");
}
}
if ((annot_mode & (1 << ANNOT_AGL)) != 0){
if (!Double.isNaN(agl) && (range <= max_annot_range)) {
sb.append(String.format("AGL %4.0f\n", agl));
} else if (reserve_missing_fields){
sb.append("\n");
}
}
if ((annot_mode & (1 << ANNOT_AZ)) != 0){
if (!Double.isNaN(az_deg)) {
sb.append(String.format("AZM %4.0f\n", az_deg));
} else if (reserve_missing_fields){
sb.append("\n");
}
}
if ((annot_mode & (1 << ANNOT_VS)) != 0){
if (!Double.isNaN(vel_up) && (range <= max_annot_range)) {
sb.append(String.format("VS %4.1f\n", vel_up));
} else if (reserve_missing_fields){
sb.append("\n");
}
}
if ((annot_mode & (1 << ANNOT_GS)) != 0){
if (!Double.isNaN(vel_hor) && (range <= max_annot_range)) {
sb.append(String.format("GS %4.1f\n", vel_hor));
} else if (reserve_missing_fields){
sb.append("\n");
}
}
if ((annot_mode & (1 << ANNOT_HDG)) != 0){
if (!Double.isNaN(hdg_deg) && (range <= max_annot_range)) {
sb.append(String.format("HDG %4.0f\n", hdg_deg));
} else if (reserve_missing_fields){
sb.append("\n");
}
}
return sb.toString();
}
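// Reading aid for the conventions above (as coded, not a spec): hdg = atan2(-vel_right, vel_away),
// normalized to [0, 2*PI), so a target moving straight away reports HDG 0, one moving to the left
// reports HDG 90 and one moving to the right reports HDG 270; whether this matches the intended
// compass convention may still need checking (see the sign TODO on the azimuth above).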
public static double interpolate (
double v0,
double v1,
double k) {
return k*v1 + (1-k) * v0;
}
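// interpolate(v0, v1, 0.0) == v0 and interpolate(v0, v1, 1.0) == v1; k is the fractional position
// of the current frame between two key frames (see kscene in generateRadarImage()).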
public static void drawCircle(
ColorProcessor colorProcessor,
Color color, // or null
double xc,
double yc,
double radius) {
double radius2 = radius*radius;
if (color == null) return; // "or null" above: nothing to draw without a color
int icolor = color.getRGB() | 0xff000000; // force opaque alpha
colorProcessor.setColor(color);
int x0 = (int) Math.floor(xc - radius);
int y0 = (int) Math.floor(yc - radius);
int x1 = (int) Math.ceil (xc + radius);
int y1 = (int) Math.ceil (yc + radius);
int width = colorProcessor.getWidth();
int height = colorProcessor.getHeight();
int [] pixels = (int[]) colorProcessor.getPixels();
for (int y = y0; y <= y1; y++) {
double dy = y-yc;
double dy2 = dy*dy;
for (int x = x0; x <= x1; x++) {
double dx = x-xc;
double r2 = dx*dx + dy2;
if ((r2 <= radius2) && (x >= 0) && (x < width) && (y >= 0) && (y < height)) { // clip to the image
pixels[x+y*width] = icolor;
}
}
}
}
private static int [] getSceneIndices(
String [] key_titles, // corresponding to top targets dimension
String [] scene_titles) { // all titles, one per frame
int [] indices = new int [key_titles.length];
int indx = 0;
for (int i =0; i < key_titles.length; i++) {
for (; !key_titles[i].equals(scene_titles[indx]); indx++);
indices[i] = indx++;
}
return indices;
}
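// Example with hypothetical titles: key_titles = {"100.2", "100.6"} against
// scene_titles = {"100.0", "100.2", "100.4", "100.6"} returns {1, 3}. Key titles are assumed to
// occur in scene_titles in the same order; a key title missing from scene_titles would run the
// inner loop past the end of the array.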
public static void createGrid(
ColorProcessor colorProcessor,
QuadCLT scene,
int [] camera_xy0,
int height,
double camera_az,
Color color_grid,
Font font, // if null = do not annotate
double max_z, // corresponds to height
int grid_azimuth_top, // = 5; // where to put azimuth
int grid_margin, // = 5; // grid annotation from left/right
double ring_step, // = 100.0;
double dir_step) // = 5.0; // degrees
{
int width = colorProcessor.getWidth();
double ifov = scene.getGeometryCorrection().getIFOV();
int sensor_width = scene.getGeometryCorrection().getSensorWH()[0];
double hfov = 2*Math.atan(ifov * sensor_width/2);
double az0 = (camera_az - hfov/2);
double az1 = (camera_az + hfov/2);
double az0_deg = az0 * 180/Math.PI;
double az1_deg = az1 * 180/Math.PI;
int az_stp0_deg = (int) Math.ceil(az0_deg/dir_step);
int az_stp1_deg = (int) Math.floor(az1_deg/dir_step);
colorProcessor.setColor(color_grid);
if (font != null) {
colorProcessor.setFont(font);
}
//TODO: add labels to the grid
int x0 = camera_xy0[0];
int y0 = camera_xy0[1];
for (int az_stp_deg = az_stp0_deg; az_stp_deg <= az_stp1_deg; az_stp_deg++) {
double az_deg = az_stp_deg * dir_step; // degrees
double rel_az = az_deg * Math.PI/180-camera_az; // angle in radians from vertical
int y1 = y0 - height;
int x1 = (int) Math.round (x0 + height * Math.tan(rel_az));
colorProcessor.drawLine(x0, y0, x1, y1);
if (font!= null) {
String txt = String.format("%.0f",az_deg);
Rectangle txt_box = colorProcessor.getStringBounds(txt);
int xt = x1 - txt_box.width/2;
if (xt < grid_margin) {
xt = grid_margin;
}
if ((xt + txt_box.width) > (width -grid_margin)) {
xt = width -grid_margin - txt_box.width;
}
int yt = grid_azimuth_top - txt_box.y; // txt_box.y is negative
colorProcessor.drawString(txt, xt, yt, Color.BLACK); // black background
}
}
double max_dist = max_z/Math.cos(hfov/2);
for (double rad = ring_step; rad < max_dist; rad += ring_step) {
double rad_pix = rad * height / max_z;
// draw ring segment with 1-pix long segments
double ang_step = 1.0/rad_pix; // ~1 pixel per segment
boolean pen_down = false;
int last_x= 0, last_y = 0;
for (double az = az0-camera_az; az <= az1-camera_az; az+= ang_step) {
int x = x0 + (int) Math.round(rad_pix*Math.sin(az));
int dy = (int) Math.round(rad_pix*Math.cos(az));
if (dy > height) {
pen_down = false;
} else {
if (pen_down) {
colorProcessor.lineTo(x, y0-dy);
} else {
colorProcessor.moveTo(x, y0-dy);
}
pen_down = true;
}
last_x = x;
last_y = y0-dy;
}
if (font != null) {
String txt = String.format("%.0f",rad);
Rectangle txt_box = colorProcessor.getStringBounds(txt);
int xt = last_x + grid_margin;
if ((xt + txt_box.width) > (width - grid_margin)) {
xt = width - grid_margin - txt_box.width;
}
// vertically align text center to the end of line
int yt = last_y - txt_box.y - txt_box.height/2; // txt_box.y < 0
colorProcessor.drawString(txt, xt, yt, Color.BLACK); // black background
}
}
return;
}
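// Grid geometry used above (reading aid): a range ring at rad meters is drawn rad * height / max_z
// pixels from the camera point, and an azimuth line at rel_az radians from the camera heading goes
// to (x0 + height * tan(rel_az), y0 - height), so rings and azimuth lines share the same
// bottom-center origin as the radar targets.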
public static ImagePlus increaseCanvas(
ImagePlus imp, // color
int new_width,
int new_height,
int x0,
int y0) {
final ImageStack stack = imp.getStack();
final ImageStack new_stack = new ImageStack (new_width, new_height, stack.getSize());
final int old_width = imp.getWidth();
final int old_height = imp.getHeight();
final Thread[] threads = ImageDtt.newThreadArray();
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nSlice = ai.getAndIncrement(); nSlice < stack.getSize(); nSlice = ai.getAndIncrement()) {
ColorProcessor cp = (ColorProcessor) stack.getProcessor(nSlice+1);
int [] old_pixels = (int []) cp.getPixels(); // [nSlice].getPixels();
int [] new_pixels = new int[new_width * new_height];
for (int row = 0; row < old_height; row++) {
System.arraycopy(
old_pixels,
row * old_width,
new_pixels,
(y0 + row) * new_width + x0,
old_width);
}
new_stack.setProcessor(new ColorProcessor(new_width, new_height, new_pixels), nSlice+1);
new_stack.setSliceLabel(stack.getSliceLabel(nSlice+1), nSlice+1); // copy labels
}
}
};
}
ImageDtt.startAndJoin(threads);
String title2 = imp.getTitle()+"-2";
ImagePlus imp_new = new ImagePlus(title2,new_stack);
return imp_new;
}
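// In radar mode (see generateExtractFilterMovingTargets()) this is used to widen the frame to a
// 16:9 canvas, placing the original LWIR image at x0 = offset_x so the left part of the canvas is
// free for the radar panel inserted with insertImage().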
public static void insertImage(
ImagePlus imp_base, // color
ImagePlus imp_insert, // color
int x0, // where to insert
int y0) {
final int base_width = imp_base.getWidth();
// final int base_height = imp_base.getHeight();
final int insert_width = imp_insert.getWidth();
final int insert_height = imp_insert.getHeight();
final ImageStack stack_base = imp_base.getStack();
final ImageStack stack_insert = imp_insert.getStack();
final Thread[] threads = ImageDtt.newThreadArray();
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nSlice = ai.getAndIncrement(); nSlice < stack_base.getSize(); nSlice = ai.getAndIncrement()) {
ColorProcessor cp_base = (ColorProcessor) stack_base.getProcessor(nSlice+1);
int [] base_pixels = (int []) cp_base.getPixels(); // [nSlice].getPixels();
ColorProcessor cp_insert = (ColorProcessor) stack_insert.getProcessor(nSlice+1); // slice out of range
int [] insert_pixels = (int []) cp_insert.getPixels(); // [nSlice].getPixels();
for (int row = 0; row < insert_height; row++) {
System.arraycopy(
insert_pixels,
row * insert_width,
base_pixels,
(y0 + row) * base_width + x0,
insert_width);
}
// new_stack.setProcessor(new ColorProcessor(new_width, new_height, new_pixels), nSlice+1);
// new_stack.setSliceLabel(stack.getSliceLabel(nSlice+1), nSlice+1); // copy labels
}
}
};
}
ImageDtt.startAndJoin(threads);
// String title2 = imp.getTitle()+"-2";
// ImagePlus imp_new = new ImagePlus(title2,new_stack);
return; // imp_new;
}
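// Note: insertImage() modifies imp_base in place and assumes imp_insert has at least as many
// slices as imp_base and fits within it at (x0, y0); otherwise getProcessor()/System.arraycopy()
// will throw (cf. the "slice out of range" note above).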
private static int findMatchingTarget(
double [][] targets,
......@@ -4336,57 +5022,40 @@ public class CuasMotion {
}
public static String getTargetTextOld(
public static String getSignedDouble(
double d,
String format) {
String s = (d > 0)? "+" : ( (d <0)? "-":" ");
return s+ String.format(format, Math.abs(d));
}
/*
public static String getTargetText(
CLTParameters clt_parameters,
double [] target) {
double target_x = target[TARGET_X];
double target_y = target[TARGET_Y];
double target_vx = target[TARGET_VX];
double target_vy = target[TARGET_VY];
String omega = "\u03A9";
double fps = 60.0;
double [][] az_el_oaz_oel= getPixToAzElev(
clt_parameters, // CLTParameters clt_parameters,
target[TARGET_X], // double target_x,
target[TARGET_Y], // double target_y,
target[TARGET_VX], // double target_vx,
target[TARGET_VY]); // double target_vy);
String number_format = "%3.0f";
String omega_format = "%3.1f";
double ifov = clt_parameters.imp.cuas_ifov; // 0.05; // degree per pixel
int px0 = clt_parameters.imp.cuas_px0; // 283; // pixel with known azimuth
int py0 = clt_parameters.imp.cuas_py0; // 386; // pixel with known elevation
double az0 = clt_parameters.imp.cuas_az0; // 201.5; // degrees for cuas_px0;
double el0 = clt_parameters.imp.cuas_el0; // 0.0; // degrees for cuas_px0;
double az = (target_x - px0)*ifov+az0;
double el = -(target_y - py0)*ifov+el0;
double omega_az = target_vx * ifov * fps;
double omega_el = -target_vy * ifov * fps;
while (az < 0) {
az += 360;
}
while (az >= 360) {
az -= 360;
}
while (el < -180) {
el += 360;
}
while (el > 180) {
el -= 360;
}
String omega = "\u03A9";
String txt = "";
txt += " AZ "+String.format(number_format,az)+"\n";
txt += " EL "+ getSignedDouble(el,number_format)+"\n";
txt += omega+"AZ "+getSignedDouble(omega_az,omega_format)+"\n";
txt += omega+"EL "+getSignedDouble(omega_el,omega_format);
txt += " AZ "+String.format(number_format,az_el_oaz_oel[0][0])+"\n";
txt += " EL "+ getSignedDouble(az_el_oaz_oel[0][1],number_format)+"\n";
txt += omega+"AZ "+getSignedDouble(az_el_oaz_oel[1][0],omega_format)+"\n";
txt += omega+"EL "+getSignedDouble(az_el_oaz_oel[1][1],omega_format);
return txt;
}
public static String getSignedDouble(
double d,
String format) {
String s = (d > 0)? "+" : ( (d <0)? "-":" ");
return s+ String.format(format, Math.abs(d));
}
*/
public static String getTargetText(
CLTParameters clt_parameters,
GeometryCorrection gc,
double [] target) {
double [][] az_el_oaz_oel= getPixToAzElev(
clt_parameters, // CLTParameters clt_parameters,
gc, // GeometryCorrection gc,
target[TARGET_X], // double target_x,
target[TARGET_Y], // double target_y,
target[TARGET_VX], // double target_vx,
......@@ -4402,15 +5071,16 @@ public class CuasMotion {
return txt;
}
// TODO: improve, get correct calculations with distortions
public static double [][] getPixToAzElev(
CLTParameters clt_parameters,
GeometryCorrection gc,
double target_x,
double target_y,
double target_vx,
double target_vy) {
double ifov = clt_parameters.imp.cuas_ifov; // 0.05; // degree per pixel
double ifov = gc.getIFOVDegrees(); // clt_parameters.imp.cuas_ifov; // 0.05; // degree per pixel
int px0 = clt_parameters.imp.cuas_px0; // 283; // pixel with known azimuth
int py0 = clt_parameters.imp.cuas_py0; // 386; // pixel with known elevation
double az0 = clt_parameters.imp.cuas_az0; // 201.5; // degrees for cuas_px0;
......@@ -4532,11 +5202,13 @@ public class CuasMotion {
}
public void generateExtractFilterMovingTargets( // move parameters to clt_parameters
final boolean radar_mode,
final boolean video_pass, // if clt_parameters.cuas_clean_video=true, video_pass=0 - output TIFFS, but no video. If video_pass==1 - only video with no debug
final boolean batch_mode,
final float [][] fpixels,
double [][][] target_sequence, // non-overlap only if consider marked stronger tiles
final int debugLevel) {
/*
generateExtractFilterMovingTargets( // move parameters to clt_parameters
clt_parameters, // CLTParameters clt_parameters,
video_pass, // final boolean video_pass, // if clt_parameters.cuas_clean_video=true, video_pass=0 - output TIFFS, but no video. If video_pass==1 - only video with no debug
......@@ -4549,10 +5221,8 @@ public class CuasMotion {
scene_titles, // String [] scene_titles, // recreate slice_titles from scene titles?
slice_titles, // String [] slice_titles,
debugLevel); // final int debugLevel)
}
public static void generateExtractFilterMovingTargets( // move parameters to clt_parameters
CLTParameters clt_parameters,
final boolean video_pass, // if clt_parameters.cuas_clean_video=true, video_pass=0 - output TIFFS, but no video. If video_pass==1 - only video with no debug
......@@ -4565,7 +5235,7 @@ public class CuasMotion {
String [] scene_titles, // recreate slice_titles from scene titles?
String [] slice_titles,
final int debugLevel) {
// String model_prefix = parentCLT.getImageName()+getParametersSuffixRslt(clt_parameters,null);
*/
String model_prefix = parentCLT.getImageName()+getParametersSuffixRslt(clt_parameters,null);
boolean clean_video = clt_parameters.imp.cuas_clean_video; //true;// save video without any debug information for targets, output in TIFF files. False - same output for video and TIFFs
......@@ -4604,15 +5274,9 @@ public class CuasMotion {
boolean show_rng = clt_parameters.imp.cuas_show_rng; // true; // Show distance to target (range) in meters
boolean show_inf = clt_parameters.imp.cuas_show_inf; // true; // Show distance greater than max (or negative) as infinity
boolean show_inf_gt = clt_parameters.imp.cuas_show_inf_gt; // false; // Use ">max" instead of infinity symbol
boolean show_true_rng = clt_parameters.imp.cuas_show_true_rng; // show true range (from the flight log)
double radar_range = clt_parameters.imp.cuas_radar_range; // maximal radar range in meters
String clean_suffix = "";
if (clean_video) {
if (video_pass) {
......@@ -4640,7 +5304,7 @@ public class CuasMotion {
double [][][] extended_targets = extendMotionScan(
target_sequence, // final double [][][] motion_scan,
null, // filter5, // final boolean [][] filtered, // centers, should be non-overlapped
cuasMotion.tilesX, // final int tilesX)
tilesX, // final int tilesX)
2, // final int range, // 1 or 2
null); // remain); // final int [] remain)
if (intermed_low) {
......@@ -4650,11 +5314,11 @@ public class CuasMotion {
// model_prefix+"-EXTENDED-TORENDER", // String title,
parentCLT.getImageName()+getParametersSuffixRslt(clt_parameters,"-EXTENDED-TORENDER"),
!batch_mode, // boolean show,
cuasMotion.tilesX); // int tilesX) {
tilesX); // int tilesX) {
parentCLT.saveImagePlusInModelDirectory(imp_extended);
}
float [][] fpixels_accumulated5x5 = cuasMotion.shiftAndRenderAccumulate(
float [][] fpixels_accumulated5x5 = shiftAndRenderAccumulate(
clt_parameters, // CLTParameters clt_parameters,
center_targ, // false, // final boolean center,
true, // final boolean fill_zeros,
......@@ -4671,23 +5335,17 @@ public class CuasMotion {
ImagePlus imp_accumulated5x5 = ShowDoubleFloatArrays.makeArrays(
fpixels_accumulated5x5, // float[][] pixels,
cuasMotion.gpu_max_width, // int width,
cuasMotion.gpu_max_height, // int height,
// model_prefix+"-ACCUMULATED"+(center_targ?"-CENTERED":""), //String title,
gpu_max_width, // int width,
gpu_max_height, // int height,
parentCLT.getImageName()+getParametersSuffixRslt(clt_parameters,"-ACCUMULATED"+(center_targ?"-CENTERED":"")),
slice_titles); //String [] titles)
imp_accumulated5x5.getProcessor().setMinAndMax(-input_range/2, input_range/2);
if (intermed_high) {
if (!batch_mode) imp_accumulated5x5.show();
parentCLT.saveImagePlusInModelDirectory(imp_accumulated5x5); // ImagePlus imp)
}
double velocity_scale = 1.0/corr_offset;
double [][][] targets60hz = new double [fpixels.length][][];
float [][] background = fpixels;
String ra_bg_suffix=(ra_background? ("-RABG"+corr_pairs):"");
if (ra_background) {
......@@ -4695,17 +5353,15 @@ public class CuasMotion {
background = runningGaussian(
fpixels, // final float [][] fpixels,
corr_pairs, // final int ra_length,
cuasMotion.gpu_max_width); // final int width)
gpu_max_width); // final int width)
} else {
background = runningAverage(
fpixels, // final float [][] fpixels,
corr_pairs, // final int ra_length,
cuasMotion.gpu_max_width); // final int width)
gpu_max_width); // final int width)
}
}
///// Now expects targets_sequence !!!!!!!!!!
float [][] replaced_targets = cuasMotion.shiftAndRenderTargets(
float [][] replaced_targets = shiftAndRenderTargets(
clt_parameters, // CLTParameters clt_parameters,
center_targ, // final boolean center,
mask_width, // final double mask_width,
......@@ -4723,8 +5379,8 @@ public class CuasMotion {
if (save_mono) {
ImagePlus imp_replaced_targets = ShowDoubleFloatArrays.makeArrays(
replaced_targets, // float[][] pixels,
cuasMotion.gpu_max_width, // int width,
cuasMotion.gpu_max_height, // int height,
gpu_max_width, // int width,
gpu_max_height, // int height,
// model_prefix+"-REPLACED-TARGETS"+(ra_background? "-RABG":""), //String title,
parentCLT.getImageName()+getParametersSuffixRslt(clt_parameters,"-REPLACED-TARGETS"+(ra_background? "-RABG":"")),
scene_titles); //String [] titles)
......@@ -4755,12 +5411,43 @@ public class CuasMotion {
annotate_uas, // final boolean show_uas,
frame0, // final int frame0,
corr_inc, // final int frame_step,
cuasMotion.gpu_max_width, // final int width,
gpu_max_width, // final int width,
// model_prefix+"-RGB"+ra_bg_suffix+clean_suffix, // String title,
parentCLT.getImageName()+getParametersSuffixRslt(clt_parameters,"-RGB"+ra_bg_suffix+clean_suffix),
scene_titles, // String [] titles,
uasLogReader, // UasLogReader uasLogReader,
debugLevel); // final int debugLevel) {
if (radar_mode) {
// extend the canvas to the left: the LWIR image is shifted right by offset_x, leaving room for the radar panel
int height2 = imp_color.getHeight();
int width2 = (int) (16.0/9.0*height2);
int offset_x = width2 - imp_color.getWidth();
// ImagePlus imp_color2
imp_color= increaseCanvas(
imp_color, // ImagePlus imp, // color
width2, // int new_width,
height2, // int new_height,
offset_x, // int x0,
0); // int y0)
// imp_color2.show();
// parentCLT.saveImagePlusInModelDirectory(imp_color2);
ImagePlus img_radar = generateRadarImage(
clt_parameters,
-1, // int annot_mode, // specify bits
parentCLT, // QuadCLT scene,
target_sequence,
uasLogReader, // contains camera orientation (getCameraATR())
parentCLT.getImageName(), // title,
slice_titles, // corresponding to top targets dimension
scene_titles, // all titles, one per frame
debugLevel);
insertImage(
imp_color, // ImagePlus imp_base, // color
img_radar, // ImagePlus imp_insert, // color
0, // int x0, // where to insert
0); // int y0)
}
// save tiff in model directory
if (imp_color != null) {
if (save_color) {
......@@ -4779,6 +5466,7 @@ public class CuasMotion {
debugLevel); // int debugLevel) {
}
}
return;
}
/**
......
......@@ -99,9 +99,10 @@ public class CuasMotionLMA {
public static final int RSLT_FL_RANGE = 57; // flight log range (meters)
public static final int RSLT_INFINITY = 58; // disparity at infinity used for range calculation
public static final int RSLT_TARGET_ID = 59; // unique target id for the whole sequence of segments. 1 is reserved for the UAS
public static final int RSLT_VEL_AXIAL = 60; // axial velocity (positive - away), range derivative (m/s), calculated from the difference to the previous/next series range
public static final int RSLT_VEL_LATERAL = 61; // lateral velocity ( calculated from angular and RSLT_GRANGE
public static final int RSLT_RANGE_LIN = 62; // range linear interpolated using previous/next series if the same target is present there
public static final int RSLT_VEL_AWAY = 60; // axial velocity (positive - away), range derivative (m/s), calculated from the difference to the previous/next series range
public static final int RSLT_VEL_RIGHT = 61; // lateral velocity, right (calculated from angular velocity and RSLT_GRANGE)
public static final int RSLT_VEL_UP = 62; // lateral velocity, up (calculated from angular velocity and RSLT_GRANGE)
public static final int RSLT_RANGE_LIN = 63; // range linear interpolated using previous/next series if the same target is present there
public static final int RSLT_LEN = RSLT_RANGE_LIN + 1;
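// Sign conventions assumed for the velocity fields above: RSLT_VEL_AWAY is positive away from the
// camera, RSLT_VEL_RIGHT positive to the right, RSLT_VEL_UP positive up; all three are in m/s
// (as produced by CuasMultiSeries.getLateralVelocity()).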
public static final String [] LMA_TITLES =
......@@ -121,7 +122,7 @@ public class CuasMotionLMA {
"Disparity","Disparity-Diff","Strength","Range","Global-index",
"segment-length","segment-disparity","segment_disp-diff", "segment-strength", "segment-range",
"FLOG-px","FLOG-pY","FLOG-DISP","FLOG-range","infinity",
"GTarget-ID","Range-derivative"};
"GTarget-ID","Vel-away", "Vel-right", "Vel-up","Range-linear"};
public static final String EXTRA_SLICE_DISCARD_ON_LOAD = "Targets";
public static final int FAIL_NONE = 0;
public static final int FAIL_MOTION = 1; // motion strength/fraction too low
......
......@@ -11,6 +11,7 @@ import java.util.concurrent.atomic.AtomicInteger;
import com.elphel.imagej.cameras.CLTParameters;
import com.elphel.imagej.gpu.GPUTileProcessor;
import com.elphel.imagej.ims.UasLogReader;
import com.elphel.imagej.tileprocessor.GeometryCorrection;
import com.elphel.imagej.tileprocessor.ImageDtt;
import com.elphel.imagej.tileprocessor.QuadCLT;
......@@ -45,10 +46,12 @@ public class CuasMultiSeries {
final String [][] scene_titles; // keyframe scene titles/timestamps
int global_targets = 0;
QuadCLT master_CLT = null;
final UasLogReader uasLogReader;
// int debugLevel = 0;
public CuasMultiSeries (
CLTParameters clt_parameters,
UasLogReader uasLogReader,
QuadCLT quadCLT_main,
String path,
String version) {
......@@ -69,6 +72,7 @@ public class CuasMultiSeries {
master_CLT = quadCLT_main;
model_dirs = new File[num_series];
model_names = new String[num_series];
this.uasLogReader = uasLogReader;
for (int nser = 0; nser < num_series; nser++) {
model_names[nser] = scene_dirs[nser].getName(); // 1747803230_276111-CENTER
model_dirs[nser] = new File(scene_dirs[nser], version);
......@@ -149,7 +153,6 @@ public class CuasMultiSeries {
combineGlobalTargets(
true, // boolean skip_assigned, // if global ID is assigned, do not mess wi that pair
debugLevel); // int debugLevel)
printAssignmentStats();
printAssignments();
printReverseAssignments();
......@@ -158,9 +161,16 @@ public class CuasMultiSeries {
avg_range_ts = getAverageRangeTimestamp(
min_disparity_range, // double min_disparity_range){
min_disparity_velocity); // double min_disparit_velocity);
linearRangeInterpolation();
printAverageRanges(avg_range_ts);
printAverageVsUASRanges(avg_range_ts);
saveUpdatedTargets();
//
ImagePlus imp_radar = testGenerateRadarImage(
clt_parameters, // CLTParameters clt_parameters,
15, // int nser,
uasLogReader, // UasLogReader uasLogReader, // contains camera orientation (getCameraATR())
debugLevel); // int debugLevel) {
return;
}
......@@ -239,12 +249,13 @@ public class CuasMultiSeries {
public void printAverageVsUASRanges(double [][][] avg_range_ts) {
System.out.println("printAverageVsUASRanges(): Compare average UAS range with flight log");
int ngtarg = 0;
System.out.println("scene,range,fl_range,axial_velocity,disparity");
System.out.println("name, timestamp,scene,range,fl_range,axial_velocity,disparity");
for (int nser = 0; nser < avg_range_ts[ngtarg].length; nser++) if (avg_range_ts[ngtarg][nser] != null) {
int mid_seq = (int) +avg_range_ts[ngtarg][nser][2];
int fl_tile = uas_tiles[nser][mid_seq];
double fl_range = targets_multi_series[nser][mid_seq][fl_tile][CuasMotionLMA.RSLT_FL_RANGE];
System.out.println(nser+", "+avg_range_ts[ngtarg][nser][AVG_RANGE]+", "+fl_range+", "+avg_range_ts[ngtarg][nser][AVG_VELOCIY]+
double ts = QuadCLT.getTimeStamp(model_names[nser]);
System.out.println(model_names[nser]+", "+ts+", "+nser+", "+avg_range_ts[ngtarg][nser][AVG_RANGE]+", "+fl_range+", "+avg_range_ts[ngtarg][nser][AVG_VELOCIY]+
", "+avg_range_ts[ngtarg][nser][AVG_DISPARITY]);
}
......@@ -546,6 +557,7 @@ public class CuasMultiSeries {
public int combineGlobalTargets(
boolean skip_assigned, // if global ID is assigned, do not mess wi that pair
int debugLevel) {
final boolean old_mode = false; // true;
int debug_min = -2; // if >= then print
boolean debug = debugLevel>= debug_min;
final GeometryCorrection gc = master_CLT.getGeometryCorrection();
......@@ -724,6 +736,7 @@ public class CuasMultiSeries {
}
}
}
if (old_mode) {
for (int tgrp = 0; tgrp < local_imap[nser].length; tgrp++) if (local_imap[nser][tgrp].length > 0){
int target_id = -1;
for (int i = 0; i < local_imap[nser][tgrp].length; i++) {
......@@ -744,7 +757,59 @@ public class CuasMultiSeries {
}
}
}
} else {
/// New global target_id values are now assigned starting with the local target sequences spanning the most keyframes.
/// This is done for later annotation ordering - longer targets are preferred (displayed over the others).
ArrayList<Point> unassigned_list= new ArrayList<Point>(); // list of unassigned groups: x = group index, y = number of keyframe occurrences
for (int tgrp = 0; tgrp < local_imap[nser].length; tgrp++) if (local_imap[nser][tgrp].length > 0){
int ntarg0 = local_imap[nser][tgrp][0]; // always exists as (local_imap[nser][tgrp].length > 0)
if (target_map[nser][ntarg0] == TARGET_INDEX_NONE) { // not assigned yet; an ID is assigned to all targets in a group at once, so checking the first member is sufficient
int num_occur = 0;
for (int i = 0; i < local_imap[nser][tgrp].length; i++) {
int ntarg = local_imap[nser][tgrp][i];
// count the total number of keyframe occurrences for this target group
for (int nseq = 0; nseq < num_seq; nseq++) {
int [] ltm = linked_targets_multi[nser][ntarg][nseq];
if (ltm != null) {
num_occur++;
}
}
}
unassigned_list.add(new Point(tgrp, num_occur));
}
}
// sort by decreasing length (longest - first)
Collections.sort(unassigned_list, new Comparator<Point>() {
@Override
public int compare(Point lhs, Point rhs) {
return Integer.compare(rhs.y, lhs.y);
}
});
for (Point p:unassigned_list) {
int tgrp = p.x;
int target_id = next_global_target_id++;
for (int i = 0; i < local_imap[nser][tgrp].length; i++) {
int ntarg = local_imap[nser][tgrp][i];
target_map[nser][ntarg] = target_id;
for (int nseq = 0; nseq < num_seq; nseq++) {
int [] ltm = linked_targets_multi[nser][ntarg][nseq];
if (ltm != null) {
int ntile = ltm[0];
double [] target = targets_multi_series[nser][nseq][ntile];
target[CuasMotionLMA.RSLT_TARGET_ID] = target_id;
}
}
}
}
if (debug) {
System.out.println("combineGlobalTargets() nser="+nser+", unassigned_list.size()="+unassigned_list.size()+", next_global_target_id="+next_global_target_id);
}
}
}
// print more debug
target_rmap = new int [next_global_target_id - 1][][];
ai.set(0);
......@@ -781,6 +846,52 @@ public class CuasMultiSeries {
return next_global_target_id;
}
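/**
 * Fill per-keyframe RSLT_RANGE_LIN and velocity components for each global target by linear
 * extrapolation from the per-series average: range(ts) = avg_range + avg_away * (ts - avg_ts),
 * then RSLT_VEL_RIGHT/RSLT_VEL_UP from getLateralVelocity() at that range and RSLT_VEL_AWAY
 * from the average axial velocity.
 */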
public void linearRangeInterpolation() {
final GeometryCorrection gc = master_CLT.getGeometryCorrection();
final Thread[] threads = ImageDtt.newThreadArray();
final AtomicInteger ai = new AtomicInteger(0);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
for (int nGtarg = ai.getAndIncrement(); nGtarg < avg_range_ts.length; nGtarg = ai.getAndIncrement()) {
int target_id = nGtarg +1;
for (int nser = 0; nser < avg_range_ts[nGtarg].length; nser++) if (avg_range_ts[nGtarg][nser] != null) {
double fps = getFps(nser);
double avg_ts = avg_range_ts[nGtarg][nser][AVG_TIMESTASMP];
double avg_range = avg_range_ts[nGtarg][nser][AVG_RANGE];
double avg_away = avg_range_ts[nGtarg][nser][AVG_VELOCIY];
for (int ntarg=0; ntarg < target_map[nser].length; ntarg++) if (target_map[nser][ntarg] == target_id){ // same global id
int nseq_start = targets_start_end[nser][ntarg][0];
int nseq_end = targets_start_end[nser][ntarg][1];
for (int nseq = nseq_start; nseq <=nseq_end; nseq++) {
int ntile = linked_targets_multi[nser][ntarg][nseq][0];
double ts = getKeyTimeStamp(nser, nseq);
double [] target = targets_multi_series[nser][nseq][ntile];
double range = avg_range;
if (avg_away != 0) {
range += avg_away * (ts - avg_ts);
}
target[CuasMotionLMA.RSLT_RANGE_LIN] = range;
double [] vlateral = getLateralVelocity (
clt_parameters, // CLTParameters clt_parameters,
gc, // GeometryCorrection gc,
target, // double [] target0, // should contain RSLT_VX, RSLT_VY, RSLT_GRANGE,
range, // double range,
fps); // double fps){ // velocity_scale times fps
target[CuasMotionLMA.RSLT_VEL_RIGHT] = vlateral[0];
target[CuasMotionLMA.RSLT_VEL_UP] = vlateral[1];
target[CuasMotionLMA.RSLT_VEL_AWAY] = avg_away;
}
}
}
}
}
};
}
ImageDtt.startAndJoin(threads);
return;
}
public double [][][] getAverageRangeTimestamp(
double min_disparit_range,
double min_disparit_velocity){
......@@ -1060,18 +1171,27 @@ public class CuasMultiSeries {
return err2;
}
@Deprecated
/**
* Get lateral velocity in meters/s
* @param clt_parameters
* @param gc
* @param target0
* @param target1
* @param range
* @param fps
* @return
*/
private static double getLateralVelocity (
CLTParameters clt_parameters,
GeometryCorrection gc,
double [] target0, // should contain RSLT_VX, RSLT_VY, RSLT_GRANGE,
double [] target1, // should contain RSLT_VX, RSLT_VY, RSLT_GRANGE,
double range,
double fps){ // velocity_scale times fps
// TODO: Improve to use actual geometry
double ifov_deg = clt_parameters.imp.cuas_ifov; // 0.05; // degree per pixel
double ifov = ifov_deg / 180 * Math.PI;
double ifov = gc.getIFOV();
double velocity_scale = CuasMotion.getFrameVelocityScale(clt_parameters); // == 1.0/clt_parameters.imp.cuas_corr_offset;
double range = target0[CuasMotionLMA.RSLT_GRANGE]; // meters
double vpx = 0.5*(target0[CuasMotionLMA.RSLT_VX] + target1[CuasMotionLMA.RSLT_VX]) * velocity_scale * fps;
double vpy = 0.5*(target0[CuasMotionLMA.RSLT_VY] + target1[CuasMotionLMA.RSLT_VY]) * velocity_scale * fps;
double vxm = vpx * ifov * range;
......@@ -1080,34 +1200,45 @@ public class CuasMultiSeries {
return vm;
}
public static double [] getLateralVelocity (
CLTParameters clt_parameters,
GeometryCorrection gc,
double [] target0, // should contain RSLT_VX, RSLT_VY, RSLT_GRANGE,
double fps){ // velocity_scale times fps
return getLateralVelocity (
clt_parameters,
gc,
target0, // should contain RSLT_VX, RSLT_VY, RSLT_GRANGE,
target0[CuasMotionLMA.RSLT_GRANGE], //double range,
fps); // velocity_scale times fps
}
/**
 * Calculate lateral (horizontal - right, vertical - up) velocities in m/s
 * @param clt_parameters processing parameters (provide the per-frame velocity scale)
 * @param gc geometry correction (provides IFOV)
 * @param target0 target data, should contain RSLT_VX, RSLT_VY, RSLT_GRANGE
 * @param target1 target data of the paired series (same fields)
 * @param range target range in meters
 * @param fps frames per second
 * @return {right, up} lateral velocity components in m/s
 */
private static double getLateralVelocity (
public static double [] getLateralVelocity (
CLTParameters clt_parameters,
GeometryCorrection gc,
double [] target0, // should contain RSLT_VX, RSLT_VY, RSLT_GRANGE,
double [] target1, // should contain RSLT_VX, RSLT_VY, RSLT_GRANGE,
double range,
double fps){ // velocity_scale times fps
double ifov = gc.getIFOV();
double velocity_scale = CuasMotion.getFrameVelocityScale(clt_parameters); // == 1.0/clt_parameters.imp.cuas_corr_offset;
double vpx = 0.5*(target0[CuasMotionLMA.RSLT_VX] + target1[CuasMotionLMA.RSLT_VX]) * velocity_scale * fps;
double vpy = 0.5*(target0[CuasMotionLMA.RSLT_VY] + target1[CuasMotionLMA.RSLT_VY]) * velocity_scale * fps;
double vpx = target0[CuasMotionLMA.RSLT_VX] * velocity_scale * fps;
double vpy = target0[CuasMotionLMA.RSLT_VY] * velocity_scale * fps;
double vxm = vpx * ifov * range;
double vym = vpy * ifov * range;
double vm = Math.sqrt(vxm*vxm+vym*vym);
return vm;
return new double [] {vxm, -vym};
}
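// Reading aid (assuming getIFOV() returns radians per pixel): a pixel velocity of vp [pix/s] at
// range R [m] corresponds to a lateral speed of roughly vp * ifov * R [m/s] (small-angle
// approximation); the Y component is negated because image Y grows downward while the returned
// "up" component grows upward.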
private static double getLateralVelocity (
CLTParameters clt_parameters,
GeometryCorrection gc,
......@@ -1122,8 +1253,6 @@ public class CuasMultiSeries {
target1, // should contain RSLT_VX, RSLT_VY, RSLT_GRANGE,
range,
fps); // velocity_scale times fps
}
......@@ -1189,8 +1318,9 @@ public class CuasMultiSeries {
double new_range = target[CuasMotionLMA.RSLT_GRANGE];
double range_scale = new_range/old_range;
if ((range_scale > 0.1) && (range_scale < 10)) { // to deal with NaN and infinities
target[CuasMotionLMA.RSLT_VEL_AXIAL] *= range_scale; // could be NaN - OK
target[CuasMotionLMA.RSLT_VEL_LATERAL] *= range_scale; // could be NaN - OK
target[CuasMotionLMA.RSLT_VEL_AWAY] *= range_scale; // could be NaN - OK
target[CuasMotionLMA.RSLT_VEL_RIGHT] *= range_scale; // could be NaN - OK
target[CuasMotionLMA.RSLT_VEL_UP] *= range_scale; // could be NaN - OK
}
target[CuasMotionLMA.RSLT_RANGE_LIN] = recalculateRangeAfterInfinityChange(
target[CuasMotionLMA.RSLT_RANGE_LIN],
......@@ -1210,6 +1340,43 @@ public class CuasMultiSeries {
return;
}
public ImagePlus testGenerateRadarImage(
CLTParameters clt_parameters,
int nser,
UasLogReader uasLogReader, // contains camera orientation (getCameraATR())
int debugLevel) {
int annot_mode = -1; // specify bits
String image_name = model_names[nser];
int corr_pairs = clt_parameters.imp.cuas_corr_pairs;
boolean ra_background = clt_parameters.imp.cuas_ra_background; // true;
String ra_bg_suffix=(ra_background? ("-RABG"+corr_pairs):"");
String clean_suffix = ""; // or "-CLEAN"
String lwir_image_title = image_name+CuasMotion.getParametersSuffixRslt(clt_parameters,"-RGB"+ra_bg_suffix+clean_suffix)+".tiff";
String lwir_path = new File(model_dirs[nser], lwir_image_title).toString();
ImagePlus imp_lwir = new ImagePlus(lwir_path);
if (imp_lwir.getWidth()> 0) {
System.out.println("Read : "+lwir_path);
String title = image_name+"-RADAR";
ImagePlus imp_radar=CuasMotion.generateRadarImage(
clt_parameters,
annot_mode, // int annot_mode, // specify bits
master_CLT, // QuadCLT scene,
targets_multi_series[nser],
uasLogReader, // contains camera orientation (getCameraATR())
title,
scene_titles[nser], // corresponding to top targets dimension
imp_lwir, // all titles, one per frame
debugLevel);
imp_radar.show();
return imp_radar;
} else {
System.out.println("File not found: "+lwir_path);
}
// final String [][] scene_titles; // keyframe scene titles/timestamps
return null;
}
public void setupGlobalTargets(
) {
double known_err= clt_parameters.imp.cuas_known_err;
......
......@@ -77,6 +77,7 @@ public class CuasRanging {
boolean smooth_omegas = clt_parameters.imp.cuas_smooth_omegas;
boolean log_ranging = clt_parameters.imp.cuas_log_ranging; // true; // Log ranging iterations
boolean reuse_disparity = clt_parameters.imp.cuas_reuse_disparity;
boolean radar_mode = clt_parameters.imp.cuas_reuse_globals;
/// double rng_limit = clt_parameters.imp.cuas_rng_limit;
boolean generate_output = clt_parameters.imp.cuas_generate; // generate and save targets Tiff and/or video files
......@@ -212,20 +213,44 @@ public class CuasRanging {
center_CLT, // QuadCLT parentCLT,
uasLogReader, // UasLogReader uasLogReader,
debugLevel); // int debugLevel)
double[][][] targets; // = null;
/*
cuasMotion.processMovingTargetsMulti( // will remove rendering
batch_mode, // final boolean batch_mode,
fpixels, // final float [][] fpixels,
debugLevel); // final int debugLevel) {
double[][][] targets = cuasMotion.getTargets();
*/
// String model_prefix = center_CLT.getImageName()+CuasMotion.getParametersSuffixRanging(clt_parameters,null);
// double [][] glob_stats = null;
if (radar_mode) {
double [][][] disparity_targets = null;
String disparity_path = center_CLT.getImageName()+CuasMotion.getParametersSuffixRanging(clt_parameters,TARGET_GLOBALS_SUFFIX); // may later be directly provided (e.g. select specific version)
if (disparity_path.indexOf(Prefs.getFileSeparator()) < 0) { // now always
String x3d_path = center_CLT.getX3dDirectory();
disparity_path = x3d_path + Prefs.getFileSeparator() + disparity_path+".tiff";
}
disparity_targets = CuasMotion.getTargetsFromHyperAugment(disparity_path);
if (disparity_targets == null) {
System.out.println("processMovingTargetsMulti(): FAILED TO READ TARGET DATA FROM "+disparity_path);
return null;
} else {
targets = disparity_targets;
System.out.println("processMovingTargetsMulti(): re-using target disparities from "+disparity_path+", generating radar mode images");
}
} else {
cuasMotion.processMovingTargetsMulti( // will remove rendering
batch_mode, // final boolean batch_mode,
fpixels, // final float [][] fpixels,
debugLevel); // final int debugLevel) {
// double[][][]
targets = cuasMotion.getTargets();
if (rng_disp || rng_glob) {
double [][][] disparity_targets = null;
if (reuse_disparity) {
// String disparity_path = model_prefix+TARGET_DISPARITIES_SUFFIX; // may later be directly provided (e.g. select specific version)
// String disparity_path = model_prefix+TARGET_DISPARITIES_SUFFIX; // may later be directly provided (e.g. select specific version)
String disparity_path = center_CLT.getImageName()+CuasMotion.getParametersSuffixRanging(clt_parameters,TARGET_DISPARITIES_SUFFIX); // may later be directly provided (e.g. select specific version)
if (disparity_path.indexOf(Prefs.getFileSeparator()) < 0) { // now always
String x3d_path = center_CLT.getX3dDirectory();
......@@ -239,7 +264,7 @@ public class CuasRanging {
System.out.println("processMovingTargetsMulti(): re-using target disparities from "+disparity_path);
}
}
// if ((disparity_targets == null) || rng_glob) { // for now - always recalculate if
// if ((disparity_targets == null) || rng_glob) { // for now - always recalculate if
if (disparity_targets == null) {
if (debugLevel > -4) {
System.out.println("detectTargets(): Generating target disparities");
......@@ -312,10 +337,12 @@ public class CuasRanging {
if (generate_csv) {
saveTargetStats(targets); // final double [][][] targets_single) {
}
}
if (generate_output) {
cuasMotion.generateExtractFilterMovingTargets( // move parameters to clt_parameters
radar_mode, // final boolean radar_mode,
false, // final boolean video_pass, // if clt_parameters.cuas_clean_video=true, video_pass=0 - output TIFFS, but no video. If video_pass==1 - only video with no debug
batch_mode, // final boolean batch_mode,
fpixels, // final float [][] fpixels,
......@@ -323,6 +350,7 @@ public class CuasRanging {
debugLevel); // final int debugLevel)
if (clean_video) {
cuasMotion.generateExtractFilterMovingTargets( // move parameters to clt_parameters
radar_mode, // final boolean radar_mode,
true, // final boolean video_pass, // if clt_parameters.cuas_clean_video=true, video_pass=0 - output TIFFS, but no video. If video_pass==1 - only video with no debug
batch_mode, // final boolean batch_mode,
fpixels, // final float [][] fpixels,
......@@ -2680,6 +2708,7 @@ public class CuasRanging {
public void saveTargetStats(
final double [][][] targets_single) {
final int tilesX = cuasMotion.getTilesX();
final GeometryCorrection gc = center_CLT.getGeometryCorrection();
final int tileSize = GPUTileProcessor.DTT_SIZE;
double cuas_infinity = clt_parameters.imp.cuas_infinity ; // 0.63; // disparity at infinity for targets
int flw_levels = clt_parameters.imp.cuas_flw_levels; // 1 - all, 2 - all and two halves, 3 - all, two halves and 4 quarters
......@@ -2722,6 +2751,7 @@ public class CuasRanging {
double [] uas_pXpYDRange = uasLogReader.getUasPxPyDRange(timestamp); // px, py, d- cuas_infinity (true disparity), range
double [][] az_el_oaz_oel= CuasMotion.getPixToAzElev(
clt_parameters, // CLTParameters clt_parameters,
gc, // GeometryCorrection gc,
uas_pXpYDRange[0], // double target_x,
uas_pXpYDRange[1], // double target_y,
0, // double target_vx,
......@@ -2750,6 +2780,7 @@ public class CuasRanging {
// calculate and output target azimuth, elevation, disparity (full) and range
az_el_oaz_oel= CuasMotion.getPixToAzElev(
clt_parameters, // CLTParameters clt_parameters,
gc, // GeometryCorrection gc,
xc, // double target_x, // null
yc, // double target_y,
vx, // double target_vx,
......
......@@ -855,7 +855,8 @@ min_str_neib_fpn 0.35
public int cuas_font_size = 8; // before scaling
public int cuas_font_type = 1; // 0 - PLAIN, 1 - BOLD, 2 - ITALIC
// AZ/EL calibration
public double cuas_ifov = 0.05; // degree per pixel
@Deprecated
public double cuas_ifov = 0.05; // degrees per pixel. Use gc.getIFOVDegrees() and gc.getIFOV() instead
public int cuas_px0 = 283; // pixel with known azimuth
public int cuas_py0 = 386; // pixel with known elevation
public double cuas_az0 = 201.5; // degrees for cuas_px0;
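// Hedged sketch of the linear AZ/EL model these calibration fields suggest; the actual
// conversion lives in CuasMotion.getPixToAzElev() (not shown here) and may additionally handle
// distortion and camera orientation. Assumes one pixel subtends ifov_deg degrees
// (gc.getIFOVDegrees(), formerly cuas_ifov) and that azimuth/elevation are known at the
// reference pixel (cuas_px0, cuas_py0). "el0_deg" stands in for the elevation reference that
// pairs with cuas_py0 (not visible in this hunk); the sign conventions are assumed.
static double [] pixToAzElLinear(double px, double py, double ifov_deg,
int px0, double az0_deg, int py0, double el0_deg) {
double az = az0_deg + (px - px0) * ifov_deg; // azimuth assumed to grow with pixel column
double el = el0_deg - (py - py0) * ifov_deg; // image rows grow downward, elevation grows upward
return new double [] {az, el};
}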
......@@ -927,6 +928,9 @@ min_str_neib_fpn 0.35
public double cuas_tmtch_disp = 0.1; // minimal disparity difference to estimate lateral velocity (smaller - consider matching)
public boolean cuas_tmtch_short = true; // when multiple conflicting matches, prefer shortest gap (false - longest combo pair length)
// parameters for "radar" image generation
public double cuas_radar_range = 1500.0; // maximal radar range in meters
// debug parameters
public boolean cuas_debug = false; // save debug images (and show them if not in batch mode)
public int cuas_dbg_rng_seq = -1; // Debug single sequence ranging (-1 - process all)
......@@ -946,6 +950,7 @@ min_str_neib_fpn 0.35
public boolean cuas_reuse_targets = true; // read previously calculated non-conflict (one per tile) targets
public String cuas_reuse_path = "-TARGETS_SINGLE-FINAL"; // either suffix (all parameters the same) or the full path (contains "/")
public boolean cuas_reuse_disparity = true; // read previously calculated non-conflict targets data with disparities (recalculate ranges)
public boolean cuas_reuse_globals = true; // if globally combined data is available, read it. If successful, show 16:9 video
public double cuas_max_disp_diff = 0.05; // Maximal disparity difference during last change to consider disparity valid
public double cuas_min_disp_str = 0.4; // Minimal disparity strength to consider disparity valid
public double cuas_rng_limit = 5000; // maximal displayed distance to target
......@@ -2768,6 +2773,9 @@ min_str_neib_fpn 0.35
"Minimal disparity difference to estimate lateral velocity (smaller - consider matching).");
gd.addCheckbox ("Prefer shortest gap", this.cuas_tmtch_short,
"When multiple conflicting matches, prefer shortest gap (false - longest combo pair length).");
gd.addMessage("=== Radar image generation (check Radar mode below) ===");
gd.addNumericField("Maximal Radar distance", this.cuas_radar_range, 5,8,"m",
"Maximal Radar range in meters.");
gd.addMessage("=== Debug ===");
gd.addCheckbox ("Save/show debug images", this.cuas_debug,
......@@ -2805,6 +2813,8 @@ min_str_neib_fpn 0.35
"Either suffix (if all the parameters the same) or the full path (contains \"/\").");
gd.addCheckbox ("Reuse previosly calculated disparities", this.cuas_reuse_disparity,
"Read previously calculated non-conflict (one per tile) targets with disparities (recalculate ranges).");
gd.addCheckbox ("Reuse combined series (Radar mode)", this.cuas_reuse_globals,
"Read target data updated after global processing of scene series \"CUAS Combine\" command. If successful, go directly to output generation with 16:9 and a radar image.");
gd.addNumericField("Maximal disparity difference", this.cuas_max_disp_diff, 6,8,"pix",
"Maximal disparity difference during last change to consider disparity valid.");
gd.addNumericField("Minimal target disparity strength", this.cuas_min_disp_str, 6,8,"",
......@@ -3984,6 +3994,8 @@ min_str_neib_fpn 0.35
this.cuas_tmtch_disp= gd.getNextNumber();
this.cuas_tmtch_short = gd.getNextBoolean();
this.cuas_radar_range= gd.getNextNumber();
this.cuas_debug = gd.getNextBoolean();
this.cuas_dbg_rng_seq = (int) gd.getNextNumber();
this.cuas_dbg_rng_tgt = (int) gd.getNextNumber();
......@@ -4003,6 +4015,7 @@ min_str_neib_fpn 0.35
this.cuas_reuse_targets = gd.getNextBoolean();
this.cuas_reuse_path = gd.getNextString();
this.cuas_reuse_disparity = gd.getNextBoolean();
this.cuas_reuse_globals = gd.getNextBoolean();
this.cuas_max_disp_diff = gd.getNextNumber();
this.cuas_min_disp_str = gd.getNextNumber();
this.cuas_rng_limit = gd.getNextNumber();
......@@ -5089,6 +5102,8 @@ min_str_neib_fpn 0.35
properties.setProperty(prefix+"cuas_tmtch_disp", this.cuas_tmtch_disp+""); // double
properties.setProperty(prefix+"cuas_tmtch_short", this.cuas_tmtch_short+""); // boolean
properties.setProperty(prefix+"cuas_radar_range", this.cuas_radar_range+""); // double
properties.setProperty(prefix+"cuas_debug", this.cuas_debug+""); // boolean
properties.setProperty(prefix+"cuas_dbg_rng_seq", this.cuas_dbg_rng_seq+""); // int
properties.setProperty(prefix+"cuas_dbg_rng_tgt", this.cuas_dbg_rng_tgt+""); // int
......@@ -5109,6 +5124,7 @@ min_str_neib_fpn 0.35
properties.setProperty(prefix+"cuas_reuse_path", this.cuas_reuse_path+""); // String
properties.setProperty(prefix+"cuas_reuse_disparity", this.cuas_reuse_disparity+"");// boolean
properties.setProperty(prefix+"cuas_reuse_globals", this.cuas_reuse_globals+""); // boolean
properties.setProperty(prefix+"cuas_max_disp_diff", this.cuas_max_disp_diff+""); // double
properties.setProperty(prefix+"cuas_min_disp_str", this.cuas_min_disp_str+""); // double
properties.setProperty(prefix+"cuas_rng_limit", this.cuas_rng_limit+""); // double
......@@ -6170,6 +6186,8 @@ min_str_neib_fpn 0.35
if (properties.getProperty(prefix+"cuas_tmtch_disp")!=null) this.cuas_tmtch_disp=Double.parseDouble(properties.getProperty(prefix+"cuas_tmtch_disp"));
if (properties.getProperty(prefix+"cuas_tmtch_short")!=null) this.cuas_tmtch_short=Boolean.parseBoolean(properties.getProperty(prefix+"cuas_tmtch_short"));
if (properties.getProperty(prefix+"cuas_radar_range")!=null) this.cuas_radar_range=Double.parseDouble(properties.getProperty(prefix+"cuas_radar_range"));
if (properties.getProperty(prefix+"cuas_debug")!=null) this.cuas_debug=Boolean.parseBoolean(properties.getProperty(prefix+"cuas_debug"));
if (properties.getProperty(prefix+"cuas_dbg_rng_seq")!=null) this.cuas_dbg_rng_seq=Integer.parseInt(properties.getProperty(prefix+"cuas_dbg_rng_seq"));
if (properties.getProperty(prefix+"cuas_dbg_rng_tgt")!=null) this.cuas_dbg_rng_tgt=Integer.parseInt(properties.getProperty(prefix+"cuas_dbg_rng_tgt"));
......@@ -6189,6 +6207,7 @@ min_str_neib_fpn 0.35
if (properties.getProperty(prefix+"cuas_reuse_path")!=null) this.cuas_reuse_path=(String) properties.getProperty(prefix+"cuas_reuse_path");
if (properties.getProperty(prefix+"cuas_reuse_disparity")!=null) this.cuas_reuse_disparity=Boolean.parseBoolean(properties.getProperty(prefix+"cuas_reuse_disparity"));
if (properties.getProperty(prefix+"cuas_reuse_globals")!=null) this.cuas_reuse_globals=Boolean.parseBoolean(properties.getProperty(prefix+"cuas_reuse_globals"));
if (properties.getProperty(prefix+"cuas_max_disp_diff")!=null) this.cuas_max_disp_diff=Double.parseDouble(properties.getProperty(prefix+"cuas_max_disp_diff"));
if (properties.getProperty(prefix+"cuas_min_disp_str")!=null) this.cuas_min_disp_str=Double.parseDouble(properties.getProperty(prefix+"cuas_min_disp_str"));
if (properties.getProperty(prefix+"cuas_rng_limit")!=null) this.cuas_rng_limit=Double.parseDouble(properties.getProperty(prefix+"cuas_rng_limit"));
......@@ -7239,6 +7258,8 @@ min_str_neib_fpn 0.35
imp.cuas_tmtch_disp = this.cuas_tmtch_disp;
imp.cuas_tmtch_short = this.cuas_tmtch_short;
imp.cuas_radar_range = this.cuas_radar_range;
imp.cuas_debug = this.cuas_debug;
imp.cuas_dbg_rng_seq = this.cuas_dbg_rng_seq;
imp.cuas_dbg_rng_tgt = this.cuas_dbg_rng_tgt;
......@@ -7257,6 +7278,7 @@ min_str_neib_fpn 0.35
imp.cuas_reuse_targets = this.cuas_reuse_targets;
imp.cuas_reuse_path= this.cuas_reuse_path;
imp.cuas_reuse_disparity = this.cuas_reuse_disparity;
imp.cuas_reuse_globals = this.cuas_reuse_globals;
imp.cuas_max_disp_diff = this.cuas_max_disp_diff;
imp.cuas_min_disp_str = this.cuas_min_disp_str;
imp.cuas_rng_limit = this.cuas_rng_limit;
......
......@@ -6236,10 +6236,14 @@ public class OpticalFlow {
CuasMotion cuasMotion = cuasRanging.detectTargets(
uasLogReader, // UasLogReader uasLogReader,
batch_mode); // boolean batch_mode)
if (cuasMotion == null) {
System.out.println("Failed target detection. Probably, radar mode was selected but target file does not exist (created with \"CUAS Combine\" command)");
} else {
if (debugLevel > -4) {
System.out.println("Target detection DONE");
}
}
}
if (generate_mapped || reuse_video) { // modifies combo_dsn_final ?
int tilesX = master_CLT.getTileProcessor().getTilesX();
......
......@@ -8647,6 +8647,15 @@ if (debugLevel > -100) return true; // temporarily !
cuas_centers = new String [] {quadCLT_main.correctionsParameters.getCuasDir(),""};
}
*/
// Processing of UAS logs was moved here so it can be used by 2 commands
UasLogReader uasLogReader = null;
String uas_log_path = quadCLT_main.correctionsParameters.getUasLogsPath();
System.out.println("Using UAS log file: "+uas_log_path);
if ((uas_log_path != null) && (uas_log_path.length() > 0)) {
uasLogReader = new UasLogReader(uas_log_path, quadCLT_main.correctionsParameters.cuasUasTimeStamp, null, quadCLT_main);
uasLogReader.setCameraATR(quadCLT_main.correctionsParameters.cuasCameraATR);
uasLogReader.setUASHomeNed(quadCLT_main.correctionsParameters.cuasSetHome? quadCLT_main.correctionsParameters.cuasUASHome : null);
}
switch (cuas_proc_mode) {
case 0 :
String uas_sky_mask_path = quadCLT_main.correctionsParameters.getUasSkyMask();
......@@ -8684,6 +8693,7 @@ if (debugLevel > -100) return true; // temporarily !
int ref_index = -1; // -1 - last
int [] start_ref_pointers = new int[2]; //{earlist, reference} - reference may be center
boolean first_in_series = true;
/*
// Process UAS logs
UasLogReader uasLogReader = null;
String uas_log_path = quadCLT_main.correctionsParameters.getUasLogsPath();
......@@ -8693,6 +8703,7 @@ if (debugLevel > -100) return true; // temporarily !
uasLogReader.setCameraATR(quadCLT_main.correctionsParameters.cuasCameraATR);
uasLogReader.setUASHomeNed(quadCLT_main.correctionsParameters.cuasSetHome? quadCLT_main.correctionsParameters.cuasUASHome : null);
}
*/
if (debugLevel > -4) {
System.out.println("cuas_centers="+((cuas_centers != null)?("[\""+cuas_centers[0]+"\",\""+cuas_centers[1]+"\"]"):"null"));
}
......@@ -8942,6 +8953,7 @@ if (debugLevel > -100) return true; // temporarily !
case 1:
CuasMultiSeries cuasMultiSeries = new CuasMultiSeries(
clt_parameters, // CLTParameters clt_parameters,
uasLogReader, //UasLogReader uasLogReader,
quadCLT_main, // QuadCLT quadCLT_main,
quadCLT_main.correctionsParameters.linkedCenters,
quadCLT_main.correctionsParameters.x3dModelVersion);
......