Commit 2a60dccc authored by Andrey Filippov

Next snapshot.

parent 360eb591
......@@ -484,7 +484,7 @@ public class CLTParameters {
// sfm-related parameters
public boolean gsmth_sfm_gnd = true; // use smooth ground for SfM DSI
public boolean gsmth_sfm_deviate = true; // enable SfM deviations from the plain
public double gsmth_sfm_gain = 10.0; // minimal SfM gain to enable deviations from the smooth level
public double gsmth_sfm_gain = 8; // 10.0; // minimal SfM gain to enable deviations from the smooth level
public int gsmth_sfm_rad = 3; // minimal radius (+/-, in tiles) to have sufficient SfM gain (skip margins)
public int gsmth_sfm_lev = 2; // start this radius level smaller if SfM is available (2 - hstep=1, ...)
public double gsmth_str_bump = 0.55; // enable bumps above level stronger than this
......
package com.elphel.imagej.orthomosaic;
import java.awt.Rectangle;
import com.elphel.imagej.gpu.GPUTileProcessor;
public class FineXYCorr {
	// Warp-grid top-left corner relative to the reference image vertical point,
	// in meters (normally negative).
	public double [] top_left_metric;
	public int width_tiles;        // warp grid width, in tiles
	public int height_tiles;       // warp grid height, in tiles
	public double warp_pix_size;   // warp grid pixel size, in meters
	public double warp_tile_size;  // warp grid tile size, in meters
	public double render_div_tile; // scale factor: render pixels -> warp tiles
	public double pix_div_render;  // scale factor: warp pixels -> render pixels
	public double [] render_origin = new double[2]; // vertical point of the first image, in render pixels
	public double [][] warp_xy;    // [width_tiles*height_tiles][2] per-tile {dx,dy} pixel shifts, may have nulls
	public Rectangle render_woi;
	public double render_pix_size; // render pixel size, in meters
	public double [] render_tl;    // render-pixel coordinates of the warp array top-left corner

	/**
	 * Create a warp-correction table defined on a tiles grid.
	 * @param zoom_lev  zoom level defining the warp grid pixel size, in meters
	 * @param width     warp grid width, in tiles
	 * @param tl_metric top-left corner relative to the reference image vertical
	 *                  point, in meters (normally negative)
	 * @param vf        [tiles][2] per-tile {dx,dy} pixel shifts, may contain nulls
	 */
	public FineXYCorr (
			int zoom_lev,
			int width,
			double [] tl_metric,
			double [][] vf) {
		warp_pix_size   = OrthoMap.getPixelSizeMeters (zoom_lev); // meters
		warp_tile_size  = warp_pix_size * GPUTileProcessor.DTT_SIZE;
		width_tiles     = width;
		height_tiles    = vf.length / width;
		top_left_metric = tl_metric;
		warp_xy         = vf;
	}

	/**
	 * Set rendering parameters (render pixel size and the render-pixel position
	 * of the first image vertical point) and precompute derived scale factors.
	 * @param zoom_lev render zoom level defining render pixel size, in meters
	 * @param px0      vertical point X, in render pixels
	 * @param py0      vertical point Y, in render pixels
	 */
	public void setRender(int zoom_lev,
			double px0,
			double py0) {
		render_pix_size = OrthoMap.getPixelSizeMeters (zoom_lev); // meters
		render_origin   = new double [] {px0, py0};
		render_tl       = new double[] {
				render_origin[0] + top_left_metric[0] / render_pix_size,
				render_origin[1] + top_left_metric[1] / render_pix_size};
		render_div_tile = render_pix_size / warp_tile_size;
		pix_div_render  = warp_pix_size / render_pix_size;
	}

	/**
	 * Rectangular area (in render pixels) where this instance's warp may change
	 * the rendering of the second image. Use it to limit calls to the per-pixel
	 * warp methods.
	 * @return Rectangle containing all render pixels that may be warped
	 */
	public Rectangle getRenderWOI (){
		double grid_width  = warp_tile_size * (width_tiles  - 1) / render_pix_size;
		double grid_height = warp_tile_size * (height_tiles - 1) / render_pix_size;
		int x0 = (int) Math.floor(render_tl[0]);
		int y0 = (int) Math.floor(render_tl[1]);
		int x1 = (int) Math.ceil (render_tl[0] + grid_width);
		int y1 = (int) Math.ceil (render_tl[1] + grid_height);
		return new Rectangle(x0, y0, x1 - x0 + 1, y1 - y0 + 1);
	}

	/**
	 * Apply warping to an {x,y} pair in place, using the nearest warp grid node
	 * (no interpolation). Outside the grid, or on a null node, the pair is left
	 * unchanged.
	 * @param xy {X,Y} in render pixels, before and after warping
	 */
	public void warpXY(// in render pixels
			double [] xy) { // no interpolation - using nearest
		int tile_x = (int) Math.round((xy[0] - render_tl[0]) * render_div_tile);
		int tile_y = (int) Math.round((xy[1] - render_tl[1]) * render_div_tile);
		if ((tile_x < 0) || (tile_x >= width_tiles) || (tile_y < 0) || (tile_y >= height_tiles)){
			return; // outside of the warp grid - no correction
		}
		double [] shift = warp_xy[tile_x + width_tiles * tile_y];
		if (shift != null) {
			xy[0] -= shift[0] * pix_div_render;
			xy[1] -= shift[1] * pix_div_render;
		}
	}

	/**
	 * Get the warp shift for a render-pixel position, using the nearest warp
	 * grid node (no interpolation).
	 * @param x render pixel X
	 * @param y render pixel Y
	 * @return {dx, dy} shift in render pixels; zeros outside the grid or where
	 *         the grid node is null
	 */
	public double [] getWarp(
			double x,
			double y) {
		int tile_x = (int) Math.round((x - render_tl[0]) * render_div_tile);
		int tile_y = (int) Math.round((y - render_tl[1]) * render_div_tile);
		if ((tile_x >= 0) && (tile_x < width_tiles) && (tile_y >= 0) && (tile_y < height_tiles)){
			double [] dxdy = warp_xy[tile_x + width_tiles * tile_y];
			if (dxdy != null) {
				return new double [] {
						dxdy[0] * pix_div_render,
						dxdy[1] * pix_div_render};
			}
		}
		return new double [2]; // no warp data here - zero shift
	}
}
......@@ -32,6 +32,7 @@ import com.elphel.imagej.readers.ImagejJp4Tiff;
import com.elphel.imagej.tileprocessor.ImageDtt;
import com.elphel.imagej.tileprocessor.IntersceneMatchParameters;
import com.elphel.imagej.tileprocessor.QuadCLT;
import com.elphel.imagej.tileprocessor.TileNeibs;
import Jama.Matrix;
import ij.ImagePlus;
......@@ -40,6 +41,7 @@ import ij.Prefs;
import ij.gui.PointRoi;
import ij.plugin.filter.AVI_Writer;
import ij.plugin.filter.GaussianBlur;
import ij.process.ColorProcessor;
import ij.process.FloatProcessor;
import ij.process.ImageConverter;
import ij.process.ImageProcessor;
......@@ -76,8 +78,7 @@ public class OrthoMap implements Comparable <OrthoMap>, Serializable{
public double averageRawPixel = Double.NaN; // measure of scene temperature
public transient double averageImagePixel = Double.NaN; // average image pixel value (to combine with raw)
transient HashMap <Integer, FloatImageData> images;
HashMap <Double, PairwiseOrthoMatch> pairwise_matches;
HashMap <String, PairwiseOrthoMatch> pairwise_matches;
private void writeObject(ObjectOutputStream oos) throws IOException {
oos.defaultWriteObject();
......@@ -140,6 +141,15 @@ public class OrthoMap implements Comparable <OrthoMap>, Serializable{
return orig_height;
}
/**
 * Store (or replace) a pairwise match in this map's match table.
 * Overwrites any match previously stored under the same name.
 * @param name key for the match (presumably the other map's name/timestamp - confirm with callers)
 * @param match pairwise match data to store
 */
public void setMatch(
String name,
PairwiseOrthoMatch match) {
pairwise_matches.put(name, match);
}
/**
 * Retrieve a previously stored pairwise match by name.
 * @param name key the match was stored under (see setMatch())
 * @return the stored match, or null if none was stored under this name
 */
public PairwiseOrthoMatch getMatch(String name) {
return pairwise_matches.get(name);
}
/**
 * @return zoom level of the original (source) image
 */
public int getOriginalZoomLevel() {
return orig_zoom_level;
}
......@@ -240,7 +250,7 @@ public class OrthoMap implements Comparable <OrthoMap>, Serializable{
orig_zoom_valid = FloatImageData.isZoomValid(orig_pix_meters);
need_extra_zoom = FloatImageData.needZoomIn(orig_pix_meters);
images = new HashMap <Integer, FloatImageData>();
pairwise_matches = new HashMap<Double, PairwiseOrthoMatch>();
pairwise_matches = new HashMap<String, PairwiseOrthoMatch>();
averageRawPixel = Double.NaN; // measure of scene temperature
averageImagePixel = Double.NaN; // average image pixel value (to combine with raw)
}
......@@ -420,9 +430,12 @@ public class OrthoMap implements Comparable <OrthoMap>, Serializable{
* @return double [4][2] array [corner number]{x,y}
*/
public double [][] get4SourceCornersMeters(){
FloatImageData orig_image = getImageData();
double width_meters = getWidth() * orig_pix_meters;
double height_meters = getHeight() * orig_pix_meters;
// FloatImageData orig_image = getImageData();
if ((orig_width <=0) || (orig_height <=0)) {
getImageData();
}
double width_meters = orig_width * orig_pix_meters;
double height_meters = orig_height * orig_pix_meters;
return new double[][] { // CW from TL
{ - vert_meters[0], - vert_meters[1]},
{width_meters - vert_meters[0], - vert_meters[1]},
......@@ -673,13 +686,13 @@ public class OrthoMap implements Comparable <OrthoMap>, Serializable{
int ipy = (int) Math.floor(py);
double fpx = px -ipx;
double fpy = py -ipy;
if ((ipx >= 0) && (ipy >= 0) && (ipx < src_width) && (ipy < src_height)) {
if ((ipx >= 0) && (ipy >= 0) && (ipx < (src_width-1)) && (ipy < (src_height-1))) {
int indx00 = ipy*src_width+ipx;
int indx = task.getTileY()*tilesX + task.getTileX();
elev[indx] =
(1.0-fpx)*(1.0-fpy)*felev[indx00] +
( fpx)*(1.0-fpy)*felev[indx00 + 1] +
(1.0-fpx)*( fpy)*felev[indx00 + src_width] +
(1.0-fpx)*( fpy)*felev[indx00 + src_width] + // throwing
( fpx)*( fpy)*felev[indx00 + src_width + 1];
}
}
......@@ -770,6 +783,11 @@ public class OrthoMap implements Comparable <OrthoMap>, Serializable{
int video_crf = 10; // lower - better, larger file size
boolean remove_avi = true;
int debugLevel = 0;
int num_slices = imp.getImageStack().getSize();
int first_slice = 1;
int last_slice = num_slices;
boolean create_image = false; // instead of video
int border_width = 3;
GenericJTabbedDialog gd = new GenericJTabbedDialog("Parameters for video from orthographic imges",1200,800);
gd.addStringField ("Output video path prefix", path_prefix, 180,
"Absolute path prefix for the video (image title will be added).");
......@@ -797,6 +815,12 @@ public class OrthoMap implements Comparable <OrthoMap>, Serializable{
gd.addNumericField("WEBM CRF", video_crf, 0, 4, "", "WEBM compression quality (lower - better, 10 - good).");
gd.addCheckbox ("Remove AVI", remove_avi, "Remove AVI (large file) after WEBM generation.");
gd.addNumericField("Debug level", debugLevel, 0, 4, "", "Debug level (not yet used).");
gd.addMessage("--- parameters for single image generation ---");
gd.addCheckbox ("Create image instead of video", create_image, "Create image instead of video.");
gd.addNumericField("First slice", first_slice, 0, 4, "", "First slice number (starts with 1)");
gd.addNumericField("Last slice", last_slice, 0, 4, "", "Last slice to combine.");
gd.addNumericField("Border width", border_width, 0, 4, "pix", "Uses \"Annotation color\".");
gd.showDialog();
if (gd.wasCanceled()) return;
path_prefix = gd.getNextString();
......@@ -829,7 +853,14 @@ public class OrthoMap implements Comparable <OrthoMap>, Serializable{
video_crf = (int) gd.getNextNumber();
remove_avi = gd.getNextBoolean();
debugLevel = (int) gd.getNextNumber();
create_image=gd.getNextBoolean();
first_slice = (int) gd.getNextNumber();
last_slice = (int) gd.getNextNumber();
border_width = (int) gd.getNextNumber();
if (create_image) {
keep_original = false; // not used
}
ImagePlus imp_um;
if (um_apply) {
imp_um = applyUnsharpMask(
......@@ -841,6 +872,20 @@ public class OrthoMap implements Comparable <OrthoMap>, Serializable{
imp_um = keep_original? imp.duplicate():imp;
}
System.out.println("applyUnsharpMask() DONE!");
if (create_image) {
ImagePlus imp_rgb_single = convertToColorAndCombine(
imp_um, // ImagePlus imp_src,
first_slice, // int first_slice,
last_slice, // int last_slice,
border_width, // int border_width,
mono_range, // double mono_range, // >0 - apply
annotate_color, // Color border_color,
debugLevel); // int debugLevel);
imp_rgb_single.show();
System.out.println("convertToColorAndCombine() DONE!");
return;
}
ImagePlus imp_rgb = convertToColorAnnotate(
imp_um, // ImagePlus imp_src,
false, // boolean keep_original,
......@@ -967,6 +1012,56 @@ public class OrthoMap implements Comparable <OrthoMap>, Serializable{
return imp;
}
/**
 * Combine a contiguous range of 32-bit float slices of an image stack into a
 * single RGB collage image. Non-NaN pixels of each slice are mapped to gray
 * using mono_range; slices are processed in order, so later slices overwrite
 * earlier ones where both have data. If border_width > 0, each slice's data
 * mask is grown by border_width and the grown-only ring (pixels that are NaN
 * in the slice but inside the grown mask) is painted with border_color,
 * outlining the slice.
 * @param imp_src      source image with a stack of float (32-bit) slices
 * @param first_slice  first slice to combine (1-based, ImageJ convention)
 * @param last_slice   last slice to combine, inclusive
 * @param border_width border width in pixels (0 - no borders)
 * @param mono_range   full input range mapped to 0..255 gray, centered at 0.
 *                     NOTE(review): the parameter comment says ">0 - apply",
 *                     but scaling is applied unconditionally; mono_range == 0
 *                     would divide by zero - confirm callers always pass > 0
 * @param border_color color used for the slice border ring
 * @param debugLevel   >= 0 prints per-slice progress to stdout
 * @return a new color (RGB) ImagePlus with the combined collage
 */
public static ImagePlus convertToColorAndCombine(
ImagePlus imp_src,
int first_slice,
int last_slice,
int border_width,
double mono_range, // >0 - apply
Color border_color,
int debugLevel) {
int width = imp_src.getWidth();
int height = imp_src.getHeight();
// Combined RGB result, int-packed 0xRRGGBB. Intentionally NOT reset between
// slices - pixels accumulate across slices to form the collage.
int [] pixels = new int [width* height];
ImageStack stack = imp_src.getStack();
// Per-slice mask of defined (non-NaN) pixels, re-filled for each slice.
boolean [] mask = new boolean[pixels.length];
final TileNeibs tnSurface = new TileNeibs(width, height);
for (int nslice = first_slice; nslice <= last_slice; nslice++) {
float [] fpixels = (float[]) stack.getPixels(nslice);
Arrays.fill(mask, false);
for (int i = 0; i <fpixels.length; i++) if (!Float.isNaN(fpixels[i])) {
mask[i]= true;
// Map [-mono_range/2 .. +mono_range/2] to [0..255], clamping outside values.
int v255 = (int) Math.round(255*(fpixels[i] + mono_range/2)/mono_range);
if (v255 < 0) v255= 0;
else if (v255 > 255) {
v255 = 255;
}
pixels[i] = v255 * 0x10101; // replicate gray value into R, G and B bytes
}
if (border_width > 0) {
// Grow the data mask by border_width, then paint the pixels that are in the
// grown mask but still NaN in the slice - this is the border ring.
tnSurface.growSelection(
border_width, // grow,
mask, // tiles,
null); // prohibit);
int border_RGB = border_color.getRGB();
for (int i = 0; i <fpixels.length; i++) if (Float.isNaN(fpixels[i]) && mask[i]) {
pixels[i]=border_RGB;
}
}
if (debugLevel >=0) {
System.out.println("Slice "+nslice+" is done. Last is "+last_slice);
}
}
ColorProcessor cp=new ColorProcessor(width,height);
cp.setPixels(pixels);
ImagePlus imp_combo=new ImagePlus(
removeKnownExtension(imp_src.getTitle())+"-collage_"+first_slice+"-"+last_slice,cp);
return imp_combo;
}
public static String removeKnownExtension(String path) {
String [] remove_ext = {".tiff", ".tif", ".avi"};
for (String ext:remove_ext) {
......
......@@ -6,13 +6,22 @@ import java.io.ObjectOutputStream;
import java.io.Serializable;
public class PairwiseOrthoMatch implements Serializable {
/**
 * Build a pairwise match from an affine transform and its JtJ matrix.
 * @param affine 2x3 affine transform between the two maps
 * @param jtj 6x6 JtJ matrix of the fit (transient - not serialized as-is)
 */
public PairwiseOrthoMatch(double [][] affine, double [][] jtj) {
this.affine = affine;
this.jtj = jtj;
}
private static final long serialVersionUID = 1L;
public double [][] affine = new double[2][3];
public transient double [][] jtj = new double [6][6];
public int zoom_lev;
public double rms = Double.NaN;
/**
 * No-argument constructor; fields keep their declared defaults
 * (rms = NaN, zoom_lev = 0) until set explicitly or by deserialization.
 */
public PairwiseOrthoMatch() {
}
/**
 * Build a fully-initialized pairwise match.
 * @param affine   2x3 affine transform between the two maps
 * @param jtj      6x6 JtJ matrix of the fit (transient - rebuilt on deserialization)
 * @param rms      fit RMS error
 * @param zoom_lev zoom level at which the match was calculated
 */
public PairwiseOrthoMatch(double [][] affine, double [][] jtj, double rms, int zoom_lev) {
	this.affine = affine;
	this.jtj = jtj;
	this.rms = rms;           // BUG FIX: parameter was accepted but never stored (rms stayed NaN)
	this.zoom_lev = zoom_lev; // BUG FIX: parameter was accepted but never stored (zoom_lev stayed 0)
}
/**
 * @return the 2x3 affine transform of this match (direct reference, not a copy)
 */
public double [][] getAffine(){
	return this.affine;
}
private void writeObject(ObjectOutputStream oos) throws IOException {
oos.defaultWriteObject();
for (int i = 0; i < jtj.length; i++) {
......@@ -23,6 +32,7 @@ public class PairwiseOrthoMatch implements Serializable {
}
private void readObject(ObjectInputStream ois) throws ClassNotFoundException, IOException {
ois.defaultReadObject();
jtj = new double [6][6]; // readObject does not use constructor!
for (int i = 0; i < jtj.length; i++) {
for (int j = i; j < jtj[i].length; j++) {
jtj[i][j] = (Double) ois.readObject();
......
......@@ -1177,7 +1177,7 @@ public class ImageDtt extends ImageDttCPU {
use_reference_buffer,
wh,
erase_clt); // put results into a "reference" buffer
if (!batch_mode && (globalDebugLevel > 2)) {
if (!batch_mode && (globalDebugLevel > 2000)) {
ComboMatch.renderFromTD (
false, // boolean use_reference,
"img"); //String suffix
......@@ -1406,7 +1406,7 @@ public class ImageDtt extends ImageDttCPU {
};
gpuQuad.setLpfRbg( // constants memory - same for all cameras
lpf_rgb,
globalDebugLevel > -2);
globalDebugLevel > 2);
// gpuQuad.printConstMem("lpf_data", true);
gpuQuad.setTasks( // copy tp_tasks to the GPU memory
......
......@@ -8646,7 +8646,7 @@ ImageDtt.startAndJoin(threads);
* @return scaled down data[]. Each item (tile) is average of the corresponding data[] pixels
* or NaN if it has no non-NaN pixels in a tile.
*/
public static double [] fillNaNsScaleDown(
public static double [] fillNaNsScaleDown( // extendBlueSky
final double [] data,
final boolean [] prohibit,
final int tile_size,
......@@ -8704,7 +8704,7 @@ ImageDtt.startAndJoin(threads);
* @param width input data[] width in pixels (height = data.length/width)
* @param threadsMax maximal number of threads to use
*/
public static void fillNaNsScaleUp(
public static void fillNaNsScaleUp( // extendBlueSky
final double [] data, // will be replaced
final double [] data_scaled,
final boolean [] prohibit,
......@@ -8734,7 +8734,7 @@ ImageDtt.startAndJoin(threads);
}
public static boolean [] fillNaNsGetFixed(
public static boolean [] fillNaNsGetFixed( // never used
final double [] data,
final boolean [] prohibit,
final int threadsMax) { // maximal number of threads to launch
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.