Commit 61af6427 authored by Andrey Filippov

Adding radar annotations

parent fadcab9e
@@ -4255,7 +4255,7 @@ public class CuasMotion {
}
}
}
if (annotate) { // TODO: use the same as in radar mode?
int text_left = xl + icon_width + space_before_text * scale;
int text_top = yt + scale * font_size; // text start from the bottom of the first line
ImageProcessor ip = fstack_scenes.getProcessor(nscene+1);
@@ -4319,6 +4319,86 @@ public class CuasMotion {
return imp;
}
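/**
 * Load the icon for the given target type from the bundled resources ("graphics/" + TARGET_ICONS[target_type])
 * and wrap it in an ImageJ ColorProcessor; scale2x selects the 2x icon variant (column 1) over the 1x one (column 0).
 */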
public static ColorProcessor getIconColorProcessor(
int target_type, // Target location matching UAS flight log: 0; // 0 - unknown, 1 - known, 2 - friend, 3 - foe
boolean scale2x) {
String resource_name = TARGET_ICONS[target_type][scale2x? 1 : 0];
URL resourceUrl = CuasMotion.class.getClassLoader().getResource("graphics/"+resource_name);
Path undetected_resourcePath = null;
try {
undetected_resourcePath = Paths.get(resourceUrl.toURI());
} catch (URISyntaxException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
String path = undetected_resourcePath.toString();
ColorProcessor cp = null;
try {
cp = new ColorProcessor(ImageIO.read(new File(path)));
} catch (IOException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
return cp;
}
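/**
 * Split packed ARGB pixels (0xAARRGGBB ints, as used by ColorProcessor) into per-pixel channel arrays:
 * index s extracts bits 8*s..8*s+7, so s = 0..3 corresponds to blue, green, red, alpha.
 */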
public static int [][] splitColorIcon(int [] icon){
final int [][] icon_rgba = new int [icon.length][4];
for (int i = 0; i < icon_rgba.length; i++) {
for (int s = 0; s < 4; s++) {
icon_rgba[i][s] = (icon[i] >> (8 * s)) & 0xff;
}
}
return icon_rgba;
}
public static void imprintPixelIcon(
int [][] icon,
int [] icon_pix,
int icon_width,
int [] image,
int width,
int xc,
int yc) {
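// Stamp an RGBA icon onto a packed-RGB image, centered at (xc, yc) and clipped at the image borders.
// Fully opaque icon pixels (alpha == 255) replace the image pixel; partially transparent ones are alpha-blended.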
int icon_height = icon.length/icon_width;
int height = image.length/width;
int xl = xc - icon_width/2;
int yt = yc - icon_height/2;
for (int y = 0; y < icon_height; y++) {
int py = yt + y;
if ((py >= 0) && (py < height)) {
for (int x = 0; x< icon_width; x++) {
int px = xl +x;
int dpix = x + y * icon_width;
int ipix = px + py*width;
if ((px >=0) && (px < width)) {
int alpha = icon[dpix][3];
if (alpha > 0) { // alpha
int dp = icon_pix[x + y * icon_width];
if (alpha == 255) {
image[ipix] = dp;
} else {
double k = alpha/255.0;
int img_pix = image[ipix];
int new_pix = 0xff000000;
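// per-channel alpha blend: out = (1 - alpha/255) * background + (alpha/255) * icon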
for (int c = 0; c < 3; c++) {
int rgb = (img_pix >> (8 * c)) & 0xff;
rgb = (int) Math.round((1-k)*rgb + k* icon[dpix][c]);
if (rgb > 255) rgb = 255;
new_pix |= (rgb << (8*c));
}
image[ipix] = new_pix;
}
}
}
}
}
}
return;
}
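// Typical use (mirrors the radar code below; variable names are illustrative only):
//   int []   icon_pix  = (int[]) cp_target_icon.getPixels();
//   int [][] icon_rgba = splitColorIcon(icon_pix);
//   imprintPixelIcon(icon_rgba, icon_pix, cp_target_icon.getWidth(), pixels[nscene], width, ixc, iyc);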
public static ImagePlus generateRadarImage(
CLTParameters clt_parameters,
int annot_mode, // specify bits
@@ -4364,30 +4444,44 @@ public class CuasMotion {
final double ifov = scene.getGeometryCorrection().getIFOV();
final double radar_range = clt_parameters.imp.cuas_radar_range;
final String font_name = clt_parameters.imp.cuas_font_name;
// final int font_size_radar = clt_parameters.imp.cuas_font_size;
final int font_size_radar = 7; // clt_parameters.imp.cuas_font_size;
final double font_ratio_radar = 1.2; // if 0 - will use default spacing ( ~=1.5)
final int font_type = clt_parameters.imp.cuas_font_type;
final Color text_color = clt_parameters.imp.cuas_text_color;
final Color selected_color = text_color; // clt_parameters.imp.cuas_selected_color;
final boolean transparent_other = clt_parameters.imp.cuas_transparent;
final boolean transparent_uas = clt_parameters.imp.cuas_transparent_uas;
final int target_type = clt_parameters.imp.cuas_target_type; // 0; // 0 - unknown, 1 - known, 2 - friend, 3 - foe
final int uas_type = clt_parameters.imp.cuas_known_type; // 2; // Target location matching UAS flight log: 0; // 0 - unknown, 1 - known, 2 - friend, 3 - foe
final boolean reserve_missing_fields = false; // make a parameter. Reserve a line for requested but missing parameters
final boolean scale2x = true;
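// scale2x = true picks the 2x icon resources, presumably to match image_scale = 2 below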
final int image_scale = 2; // here always 2
final int space_before_text = 2 * image_scale;
final int width = 540; // calculate
final int height = 1024; // calculate
final int radar_height = 950; // 970; // calculate
// move to configs:
final int bottom_gap = 10;
final int infinity_gap = 24; // 10; // add to radar_height
final double ring_step = 100.0;
final double dir_step = 5.0; // degrees
final int grid_font_size = 7;
final int grid_azimuth_top= 5; // where to put azimuth
final int grid_margin= 5; // grid annotation from left/right
// final Color rings_color = new Color(140,140,140);
final Color rings_color = new Color(100,100,100);
final Color uas_color = new Color( 0,100,140);
final Color target_color = new Color( 0,255,100);
final double uas_radius = 4.0;
final double target_radius = 2.5;
final boolean annotate_grid = true;
final Font font_grid = annotate_grid ? (new Font(font_name, font_type, image_scale * grid_font_size)): null;
final Font font_target = new Font(font_name, font_type, image_scale * font_size_radar);
final int [] camera_xy0 = new int[] {width/2,height-bottom_gap};
@@ -4399,13 +4493,30 @@ public class CuasMotion {
color_processors[nscene] = new ColorProcessor(width, height, pixels[nscene]);
stack.addSlice(scene_titles[nscene], color_processors[nscene]);
}
// final double camera_az = uasLogReader.getCameraATR()[0];
final double [] camera_atr = uasLogReader.getCameraATR();
final ErsCorrection ersCorrection = scene.getErsCorrection();
// prepare target icons
ColorProcessor cp_target_icon = getIconColorProcessor(
target_type, // int target_type, // Target location matching UAS flight log: 0; // 0 - unknown, 1 - known, 2 - friend, 3 - foe
scale2x); // boolean scale2x)
ColorProcessor cp_uas_icon = getIconColorProcessor(
uas_type, // int target_type, // Target location matching UAS flight log: 0; // 0 - unknown, 1 - known, 2 - friend, 3 - foe
scale2x); // boolean scale2x)
final int target_icon_width = cp_target_icon.getWidth();
final int uas_icon_width = cp_uas_icon.getWidth();
final int [] target_icon_pixels = (int[]) cp_target_icon.getPixels();
final int [] uas_icon_pixels = (int[]) cp_uas_icon.getPixels();
final int [][] target_icon_rgba = splitColorIcon(target_icon_pixels);
final int [][] uas_icon_rgba = splitColorIcon(uas_icon_pixels);
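// icons are loaded and split into RGBA channels once here, so the worker threads below only stamp pre-parsed pixels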
createGrid(
color_processors[0], // ColorProcessor colorProcessor,
scene, // QuadCLT scene,
camera_xy0, // int [] camera_xy0,
radar_height, // int height,
camera_atr[0], // double camera_az,
rings_color, // Color color_grid,
font_grid, // Font font, // if null = do not annotate
radar_range, // double max_z, // corresponds to height
@@ -4416,13 +4527,13 @@ public class CuasMotion {
for (int nscene = 1; nscene < num_scenes; nscene++) {
System.arraycopy(pixels[0], 0, pixels[nscene], 0, pixels[nscene].length);
}
ImagePlus imp_new = new ImagePlus(title,stack);
final Thread[] threads = ImageDtt.newThreadArray();
final AtomicInteger ai = new AtomicInteger(0);
final AtomicInteger amax = new AtomicInteger(-1);
final int [] uas_tiles = new int[num_seq];
// Arrays.fill(uas_tiles, -1);
final int [][] local_tiles = new int [num_seq][];
for (int ithread = 0; ithread < threads.length; ithread++) {
@@ -4475,48 +4586,38 @@ public class CuasMotion {
ltargets_first_last[ltarg-1] = new int[] {indx_first,indx_last};
}
}
for (int nscene = 0; nscene < num_scenes; nscene++) {
color_processors[nscene].setFont(font_target); // to calculate text boxes, later will need to set it for all processors
}
// mark UAS log and targets on the radar
// color_processors[0].setFont(font_target); // to calculate text boxes, later will need to set it for all processors
ai.set(0);
int [] scene_indices = getSceneIndices(
key_titles, // String [] key_titles, // corresponding to top targets dimension
scene_titles); // String [] scene_titles) { // all titles, one per frame
final double kpix = radar_height/radar_range;
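// kpix converts range in meters to radar pixels: a target at radar_range plots radar_height pixels above the camera origin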
final Rectangle full_wnd = new Rectangle(0,0,width,height);
for (int ithread = 0; ithread < threads.length; ithread++) {
threads[ithread] = new Thread() {
public void run() {
int [] conflict_pix = new int [width * height];
ColorProcessor conflict_cp = new ColorProcessor(width, height, conflict_pix);
for (int nSeq = ai.getAndIncrement(); nSeq < (targets.length - 1); nSeq = ai.getAndIncrement()) {
// color_processors[nSeq].setFont(font_target); // to calculate text boxes, later will need to set it for all processors
Arrays.fill(conflict_pix, 0);
// for now only for targets longer than 1 series
int nscene0 = scene_indices[nSeq];
int nscene1 = scene_indices[nSeq+1];
double kscene = 1.0/(nscene1-nscene0);
// plot UAS
double [] uas_target0= null, uas_target1=null;
if ((uas_tiles[nSeq] >= 0) && (uas_tiles[nSeq+1] >= 0)) {
uas_target0= targets[nSeq][uas_tiles[nSeq]];
uas_target1= targets[nSeq+1][uas_tiles[nSeq+1]];
for (int nscene = nscene0; nscene < nscene1; nscene++) { // threads collide if <=
double k = (nscene - nscene0) * kscene; // 0 when nscene=nscene0
double px = interpolate(uas_target0[CuasMotionLMA.RSLT_FL_PX], uas_target1[CuasMotionLMA.RSLT_FL_PX],k);
double py = interpolate(uas_target0[CuasMotionLMA.RSLT_FL_PY], uas_target1[CuasMotionLMA.RSLT_FL_PY],k);
@@ -4546,16 +4647,44 @@ public class CuasMotion {
uas_radius); // double radius)
}
}
// get the number of visible targets for this scene sequence
int num_visible = 0;
int [] target_ids = new int [local_tiles[nSeq].length]; // ltargets_first_last.length];
for (int i = 1; i < local_tiles[nSeq].length; i++) {
if ((local_tiles[nSeq][i] > 0) && (i < local_tiles[nSeq + 1].length) && (local_tiles[nSeq + 1][i] > 0)) { // visible in both this and next
num_visible++;
int ntile = local_tiles[nSeq][i]; // 1- based 0 - uas log
double [] target = targets[nSeq][ntile];
target_ids[i] = (int) target[CuasMotionLMA.RSLT_TARGET_ID]; // i was out of range
}
}
Integer [] targets_order_integer = new Integer [num_visible];
int indx = 0;
for (int i = 1; i < local_tiles[nSeq].length; i++) {
if ((local_tiles[nSeq][i] > 0) && (i < local_tiles[nSeq + 1].length) && (local_tiles[nSeq + 1][i] > 0)) { // visible in both this and next
targets_order_integer[indx++] = i;
}
}
Arrays.sort(targets_order_integer, new Comparator<Integer>() { //
@Override
public int compare(Integer lhs, Integer rhs) {
return Integer.compare(target_ids[lhs], target_ids[rhs]);
}
});
// 0-based indices, 0-based values.
final int [] targets_order = Arrays.stream(targets_order_integer).mapToInt(Integer::intValue).toArray();
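// targets_order lists the local target indices visible in both this and the next series, sorted by ascending
// global target ID; index 0 is treated as the highest-priority target in the drawing loops below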
// plot targets
// int icon_width = (int) Math.round(2 * target_radius); // use icon width when used
// now target_coord is ordered from highest priority to the lowest
double [][][] target_coord = new double [local_tiles[nSeq].length-1][][]; // save centers of targets and text coordinates
Rectangle [][] annot_boxes = new Rectangle[local_tiles[nSeq].length-1][]; // order will match target_coord order
String [][] annots = new String [local_tiles[nSeq].length-1][];
// add integer color and bg_color (-1 - transparent)
// iltarget: 0 - highest priority
for (int iltarget = 0; iltarget < targets_order.length; iltarget ++) {
int ltarget = targets_order[iltarget]; // + 1;
// change
// for (int ltarget = 1; ltarget < local_tiles[nSeq].length; ltarget++) {
if ( (local_tiles[nSeq].length > ltarget) &&
(local_tiles[nSeq+1].length > ltarget) &&
(local_tiles[nSeq][ltarget] >=0) &&
@@ -4600,43 +4729,187 @@ public class CuasMotion {
radar_x1 = camera_xy0[0] + (px1 - sensor_width/2) * ifov * (radar_height + infinity_gap);
radar_y1 = camera_xy0[1] - (radar_height + infinity_gap);
}
// target_coord[ltarget-1] = new double [nscene1-nscene0+1][2];
target_coord[iltarget] = new double [nscene1-nscene0+1][2];
annot_boxes[iltarget] = new Rectangle[nscene1-nscene0+1];
annots[iltarget] = new String[nscene1-nscene0+1];
for (int nscene = nscene0; nscene < nscene1; nscene++) { // threads collide if <= (when drawing, here just not needed)
double k = (nscene - nscene0) * kscene; // 0 when nscene=nscene0
double radar_x = interpolate(radar_x0, radar_x1, k);
double radar_y = interpolate(radar_y0, radar_y1, k);
String annot_txt = getAnnotationText(
clt_parameters, // CLTParameters clt_parameters,
annot_mode, // int annot_mode,
reserve_missing_fields, // boolean reserve_missing_fields, // make a parameter. Reserve a line for requested but missing parameters
ntile0, // int ntile0,
ntile1, // int ntile1,
target0, // double [] target0,
target1, // double [] target1,
uas_target0, // double [] uas_target0,
uas_target1, // double [] uas_target1,
k, // double k,
camera_atr, // double [] camera_atr,
ersCorrection); // ErsCorrection ersCorrection)
target_coord[iltarget][nscene-nscene0][0] = radar_x; // null pointer
target_coord[iltarget][nscene-nscene0][1] = radar_y;
annots[iltarget][nscene-nscene0] = annot_txt;
Rectangle text_box = getStringBounds(color_processors[nscene], annot_txt, font_ratio_radar);
annot_boxes[iltarget][nscene-nscene0] = text_box;
}
}
}
// plot all targets for the series on the same conflict_cp (for later use)
for (int iltarget = 0; iltarget < targets_order.length; iltarget ++) {
for (int nscene = nscene0; nscene < nscene1; nscene++) { // threads collide if <=
drawCircle(
conflict_cp, // ColorProcessor colorProcessor,
target_color, // Color color, // or null
target_coord[iltarget][nscene-nscene0][0], // double xc,
target_coord[iltarget][nscene-nscene0][1], // double yc,
target_radius); // double radius)
}
}
// now plot all text in reverse order
// now plot all target circles in reverse order
// TODO: adjust boxes to avoid overlaps - use conflict_cp to avoid targets
for (int iltarget = targets_order.length - 1; iltarget >=0; iltarget --) {
int target_id = target_ids[targets_order[iltarget]];
boolean is_uas = target_id == CuasMultiSeries.TARGET_INDEX_UAS;
boolean target_text_transparent = is_uas ? transparent_uas : transparent_other;
int icon_width = is_uas ? uas_icon_width: target_icon_width;
for (int nscene = nscene0; nscene < nscene1; nscene++) { // threads collide if <=
color_processors[nscene].setColor(is_uas ? selected_color: text_color);
String annot_txt=annots[iltarget][nscene-nscene0];
int text_left = (int)Math.round(target_coord[iltarget][nscene-nscene0][0] + icon_width/2 + space_before_text);
int text_top = (int)Math.round(target_coord[iltarget][nscene-nscene0][1]);
Rectangle abs_box = annot_boxes[iltarget][nscene-nscene0];
abs_box.x += text_left;
abs_box.y += text_top;
if (full_wnd.contains(abs_box) ) {
Color bg_color = target_text_transparent ? null : Color.BLACK;
drawString(
color_processors[nscene], // ImageProcessor ip,
annot_txt, // String txt,
text_left, // int left,
text_top, // int top,
bg_color, // Color bgColor, // null - transparent
font_ratio_radar); // double ratio) { // NaN or 0 - standard
}
}
}
// plot icons
for (int iltarget = 0; iltarget < targets_order.length; iltarget ++) {
int target_id = target_ids[targets_order[iltarget]];
boolean is_uas = target_id == CuasMultiSeries.TARGET_INDEX_UAS;
int [] icon_pixels = is_uas ? uas_icon_pixels : target_icon_pixels;
int [][] icon_rgba = is_uas ? uas_icon_rgba : target_icon_rgba;
int icon_width = is_uas ? uas_icon_width: target_icon_width;
for (int nscene = nscene0; nscene < nscene1; nscene++) {
int ixc= (int) Math.round(target_coord[iltarget][nscene-nscene0][0]);
int iyc= (int) Math.round(target_coord[iltarget][nscene-nscene0][1]);
// int [] image_pixels = (int []) color_processors[nscene].getPixels();
imprintPixelIcon(
icon_rgba, // int [][] icon,
icon_pixels, // int [] icon_pix,
icon_width, // int icon_width,
pixels[nscene], // int [] image,
width, // int width,
ixc, // int xc,
iyc); // int yc)
}
}
// now plot targets (any order, just after everything else). Maybe use a special color for the UAS
for (int iltarget = 0; iltarget < targets_order.length; iltarget ++) {
for (int nscene = nscene0; nscene < nscene1; nscene++) { // threads collide if <=
drawCircle(
color_processors[nscene], // ColorProcessor colorProcessor,
target_color, // Color color, // or null
target_coord[iltarget][nscene-nscene0][0], // double xc,
target_coord[iltarget][nscene-nscene0][1], // double yc,
target_radius); // double radius)
}
}
}
}
}
};
}
ImageDtt.startAndJoin(threads);
return imp_new;
}
/**
 * ImageProcessor.getStringBounds() does not handle multi-line text; this version unions the
 * per-line bounding boxes, offsetting each subsequent line by the line spacing.
 * @param ip image processor whose current font is used
 * @param txt (possibly multi-line) text to measure
 * @return bounding rectangle enclosing all lines
 */
public static Rectangle getStringBounds(
ImageProcessor ip,
String txt) {
return getStringBounds(
ip, // ImageProcessor ip,
txt, // String txt,
0); // double ratio)
}
public static Rectangle getStringBounds(
ImageProcessor ip,
String txt,
double ratio) {
int font_size = ip.getFont().getSize();
int line_space = (ratio > 0) ? ((int) Math.round(font_size * ratio)) : ip.getFontMetrics().getHeight();
String lines[] = txt.split("\\r?\\n");
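// union the per-line bounds, shifting the box of line i down by i * line_space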
Rectangle bb = ip.getStringBounds(lines[0]);
for (int i = 1; i < lines.length;i++) {
Rectangle bb_line = ip.getStringBounds(lines[i]);
bb_line.y += i * line_space;
bb.add(bb_line);
}
return bb;
}
public static void drawString(
ImageProcessor ip,
String txt,
int left,
int top,
Color bgColor, // null - transparent
double ratio) { // NaN or 0 - standard
int font_size = ip.getFont().getSize();
int line_space = (ratio > 0) ? ((int) Math.round(font_size * ratio)) : ip.getFontMetrics().getHeight();
String lines[] = txt.split("\\r?\\n");
for (int i = 0; i < lines.length;i++) {
int line_top = top + i * line_space;
if (bgColor == null) {
ip.drawString(lines[i], left, line_top); // transparent
} else {
ip.drawString(lines[i], left, line_top, bgColor); // transparent
}
}
}
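// Example with the radar settings above: font size = image_scale * font_size_radar = 14 and font_ratio_radar = 1.2
// give a line spacing of Math.round(14 * 1.2) = 17 px; ratio <= 0 falls back to the FontMetrics line height.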
/*
if (target_text_transparent) {
color_processors[nscene].drawString(annot_txt, text_left, text_top); // transparent.
} else {
color_processors[nscene].drawString(annot_txt, text_left, text_top, Color.BLACK); // solid color
}
*/
public static String getAnnotationText(
CLTParameters clt_parameters,
int annot_mode, // specify bits
boolean reserve_missing_fields, // make a parameter. Reserve a line for requested but missing parameters
int ntile0,
int ntile1,
double [] target0,
double [] target1,
double [] uas_target0,
double [] uas_target1,
double k,
double [] camera_atr,
ErsCorrection ersCorrection) {
@@ -4644,8 +4917,6 @@ public class CuasMotion {
boolean show_inf_gt = clt_parameters.imp.cuas_show_inf_gt; // false; // Use ">max" instead of infinity symbol
double max_annot_range = clt_parameters.imp.cuas_rng_limit; // 5000, maybe make a separate parameter
double max_axial_range = clt_parameters.imp.cuas_radar_range; // may be a separate - maximal range for axial velocity/heading
double ifov = ersCorrection.getIFOV();
int sensor_width = ersCorrection.getSensorWH()[0];
int tileSize = GPUTileProcessor.DTT_SIZE;
@@ -4666,16 +4937,23 @@ public class CuasMotion {
if (range > max_annot_range) {
range = Double.POSITIVE_INFINITY;
}
boolean is_uas = (uas_target0 != null) && (uas_target1 != null) && (id == CuasMultiSeries.TARGET_INDEX_UAS);
double true_range = is_uas ? interpolate (uas_target0[CuasMotionLMA.RSLT_FL_RANGE],uas_target1[CuasMotionLMA.RSLT_FL_RANGE],k): Double.NaN;
double disparity = 0;
if (!Double.isInfinite(range)) {
disparity = ersCorrection.getDisparityFromZ(range);
}
// double [] xyz = null;
double [] wxyz = null;
double agl = Double.NaN;
double az = Double.NaN;
double [][] icamera_atr = ErsCorrection.invertXYZATR(
OpticalFlow.ZERO3, // double [] source_xyz,
camera_atr); // double [] source_atr)
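// icamera_atr[1] is the camera attitude inverted through invertXYZATR (translation stays ZERO3);
// it is passed below as reference_atr so the projected world coordinates account for the camera orientation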
if (disparity > 0) {
/*
xyz = ersCorrection.getWorldCoordinatesERS( // ersCorrection - reference
px, // double px, // pixel coordinate X in the reference view
py, // double py, // pixel coordinate Y in the reference view
@@ -4683,20 +4961,42 @@ public class CuasMotion {
true, // boolean distortedView, // This camera view is distorted (diff.rect), false - rectilinear
OpticalFlow.ZERO3, // double [] reference_xyz, // this view position in world coordinates (typically ZERO3)
OpticalFlow.ZERO3); // double [] reference_atr, // this view orientation relative to world frame (typically ZERO3)
*/
wxyz = ersCorrection.getWorldCoordinatesERS( // ersCorrection - reference
px, // double px, // pixel coordinate X in the reference view
py, // double py, // pixel coordinate Y in the reference view
disparity, // double disparity, // reference disparity
true, // boolean distortedView, // This camera view is distorted (diff.rect), false - rectilinear
OpticalFlow.ZERO3, // double [] reference_xyz, // this view position in world coordinates (typically ZERO3)
icamera_atr[1]); // double [] reference_atr, // this view orientation relative to world frame (typically ZERO3)
agl = wxyz[1];
az = Math.atan2(wxyz[0], -wxyz[2]); // TODO: check sign
} else {
/*
xyz = ersCorrection.getWorldCoordinatesERS( // ersCorrection - reference
px, // double px, // pixel coordinate X in the reference view
py, // double py, // pixel coordinate Y in the reference view
disparity, // double disparity, // reference disparity
true, // boolean distortedView, // This camera view is distorted (diff.rect), false - rectilinear
OpticalFlow.ZERO3, // double [] reference_xyz, // this view position in world coordinates (typically ZERO3)
OpticalFlow.ZERO3); // double [] reference_atr, // this view orientation relative to world frame (typically ZERO3)
wxyz = ersCorrection.getWorldCoordinatesERS( // ersCorrection - reference
px, // double px, // pixel coordinate X in the reference view
py, // double py, // pixel coordinate Y in the reference view
disparity, // double disparity, // reference disparity
true, // boolean distortedView, // This camera view is distorted (diff.rect), false - rectilinear
OpticalFlow.ZERO3, // double [] reference_xyz, // this view position in world coordinates (typically ZERO3)
icamera_atr[1]); // double [] reference_atr, // this view orientation relative to world frame (typically ZERO3)
if (wxyz != null) {
az = Math.atan2(wxyz[0], -wxyz[2]); // TODO: check sign
}
*/
az = (px1 - sensor_width/2) * ifov + camera_atr[0];
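// no usable range: fall back to azimuth = (pixel offset from the sensor center) * per-pixel IFOV + camera azimuth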
}
while (az <0 ) az += 2*Math.PI;
while (az > 2*Math.PI ) az -= 2*Math.PI; // az is in radians, so wrap at 2*PI
double az_deg = az * 180/Math.PI;
double vel_away = interpolate (target0[CuasMotionLMA.RSLT_VEL_AWAY], target1[CuasMotionLMA.RSLT_VEL_AWAY], k);
if (range > max_axial_range) {
vel_away = 0;
@@ -4713,7 +5013,7 @@ public class CuasMotion {
StringBuffer sb = new StringBuffer();
if ((annot_mode & (1 << ANNOT_ID)) != 0){
sb.append(String.format(" ID %03d\n", id)); sb.append(String.format("ID %03d\n", id));
} }
if ((annot_mode & (1 << ANNOT_RANGE)) != 0){ if ((annot_mode & (1 << ANNOT_RANGE)) != 0){
if (range <= max_annot_range) { // handles POSITIVE_INFINITY if (range <= max_annot_range) { // handles POSITIVE_INFINITY
...@@ -4730,7 +5030,7 @@ public class CuasMotion { ...@@ -4730,7 +5030,7 @@ public class CuasMotion {
} }
if ((annot_mode & (1 << ANNOT_TRANG)) != 0){ if ((annot_mode & (1 << ANNOT_TRANG)) != 0){
if (!Double.isNaN(true_range)) { if (!Double.isNaN(true_range)) {
sb.append(String.format("TRNG%4.0f\n", range)); sb.append(String.format("TRNG%4.0f\n", true_range));
} else if (reserve_missing_fields){ } else if (reserve_missing_fields){
sb.append("\n"); sb.append("\n");
} }
...@@ -5028,27 +5328,7 @@ public class CuasMotion { ...@@ -5028,27 +5328,7 @@ public class CuasMotion {
String s = (d > 0)? "+" : ( (d <0)? "-":" "); String s = (d > 0)? "+" : ( (d <0)? "-":" ");
return s+ String.format(format, Math.abs(d)); return s+ String.format(format, Math.abs(d));
} }
/*
public static String getTargetText(
CLTParameters clt_parameters,
double [] target) {
double [][] az_el_oaz_oel= getPixToAzElev(
clt_parameters, // CLTParameters clt_parameters,
target[TARGET_X], // double target_x,
target[TARGET_Y], // double target_y,
target[TARGET_VX], // double target_vx,
target[TARGET_VY]); // double target_vy);
String number_format = "%3.0f";
String omega_format = "%3.1f";
String omega = "\u03A9";
String txt = "";
txt += " AZ "+String.format(number_format,az_el_oaz_oel[0][0])+"\n";
txt += " EL "+ getSignedDouble(az_el_oaz_oel[0][1],number_format)+"\n";
txt += omega+"AZ "+getSignedDouble(az_el_oaz_oel[1][0],omega_format)+"\n";
txt += omega+"EL "+getSignedDouble(az_el_oaz_oel[1][1],omega_format);
return txt;
}
*/
public static String getTargetText(
CLTParameters clt_parameters,
GeometryCorrection gc,
...