Commit 364684d5 authored by Oleg Dzhimiev

Merge branch 'master' of git.elphel.com:Elphel/python3-imagej-tiff

parents 39c85178 464cacc9
@@ -625,7 +625,16 @@ class ExploreData:
         if set_ds is None:
             set_ds = self.train_ds
+        try:
+            os.makedirs(os.path.dirname(tfr_filename))
+            print("Created directory "+os.path.dirname(tfr_filename))
+        except:
+            print("Directory "+os.path.dirname(tfr_filename)+" already exists, using it")
+            pass
+        #skip writing if file exists - it will be possible to continue or run several instances
+        if os.path.exists(tfr_filename):
+            print(tfr_filename+" already exists, skipping generation. Please remove and re-run this program if you want to regenerate the file")
+            return
         writer = tf.python_io.TFRecordWriter(tfr_filename)
 #$      files_list = [self.files_train, self.files_test][test_set]
         seed_list = np.arange(len(files_list))
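For reference, the added pre-flight logic amounts to: create the output directory if it is missing, and return early if the .tfrecords file already exists so that restarted or parallel runs do not overwrite finished output. A minimal standalone sketch of that pattern (the helper name is illustrative, not from the repository; assumes Python 3.2+ where os.makedirs accepts exist_ok):

    import os

    def prepare_tfr_output(tfr_filename):
        """Return True if tfr_filename still needs to be generated."""
        out_dir = os.path.dirname(tfr_filename)
        if out_dir:
            os.makedirs(out_dir, exist_ok=True)   # no bare try/except around os.makedirs needed
        if os.path.exists(tfr_filename):
            print(tfr_filename + " already exists, skipping generation")
            return False
        return True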
@@ -666,7 +675,7 @@ class ExploreData:
                 example = tf.train.Example(features=tf.train.Features(feature=d_feature))
                 writer.write(example.SerializeToString())
                 if (self.debug_level > 0):
-                    print("Scene %d of %d -> %s"%(nscene, len(seed_list), tfr_filename))
+                    print_time("Scene %d of %d -> %s"%(nscene, len(seed_list), tfr_filename))
         writer.close()
         sys.stdout.flush()
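The surrounding loop serializes one tf.train.Example per scene through the TF 1.x tf.python_io.TFRecordWriter. A self-contained sketch of that write path, assuming float features (the feature names below are illustrative; the project's actual feature layout is not shown in this hunk):

    import numpy as np
    import tensorflow as tf  # TF 1.x, where tf.python_io.TFRecordWriter is available

    def write_float_records(tfr_filename, corr2d_batch, target_batch):
        """Write one Example per row; feature names are illustrative only."""
        writer = tf.python_io.TFRecordWriter(tfr_filename)
        for corr2d, target in zip(corr2d_batch, target_batch):
            d_feature = {
                'corr2d': tf.train.Feature(float_list=tf.train.FloatList(value=corr2d.flatten())),
                'target': tf.train.Feature(float_list=tf.train.FloatList(value=target.flatten())),
            }
            example = tf.train.Example(features=tf.train.Features(feature=d_feature))
            writer.write(example.SerializeToString())
        writer.close()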
@@ -742,38 +751,30 @@ if __name__ == "__main__":
     try:
         topdir_train = sys.argv[1]
     except IndexError:
-        topdir_train = "/mnt/dde6f983-d149-435e-b4a2-88749245cc6c/home/eyesis/x3d_data/data_sets/train"#test" #all/"
+#        topdir_train = "/mnt/dde6f983-d149-435e-b4a2-88749245cc6c/home/eyesis/x3d_data/data_sets/train"#test" #all/"
+        topdir_train = "/home/eyesis/x3d_data/data_sets/train_mlr32_18a"
     try:
         topdir_test = sys.argv[2]
     except IndexError:
-        topdir_test = "/mnt/dde6f983-d149-435e-b4a2-88749245cc6c/home/eyesis/x3d_data/data_sets/test"#test" #all/"
+#        topdir_test = "/mnt/dde6f983-d149-435e-b4a2-88749245cc6c/home/eyesis/x3d_data/data_sets/test"#test" #all/"
+        topdir_test = "/home/eyesis/x3d_data/data_sets/test_mlr32_18a"
     try:
         pathTFR = sys.argv[3]
     except IndexError:
-        pathTFR = "/mnt/dde6f983-d149-435e-b4a2-88749245cc6c/home/eyesis/x3d_data/data_sets/tf_data_3x3b" #no trailing "/"
+#        pathTFR = "/mnt/dde6f983-d149-435e-b4a2-88749245cc6c/home/eyesis/x3d_data/data_sets/tf_data_3x3b" #no trailing "/"
+        pathTFR = "/home/eyesis/x3d_data/data_sets/tf_data_5x5" #no trailing "/"
     try:
         ml_subdir = sys.argv[4]
     except IndexError:
-        ml_subdir = "ml"
+#        ml_subdir = "ml"
+        ml_subdir = "mlr32_18a"
-#    pathTFR = "/mnt/dde6f983-d149-435e-b4a2-88749245cc6c/home/eyesis/x3d_data/data_sets/tf_data_3x3b" #no trailing "/"
     test_corr = '/home/eyesis/x3d_data/models/var_main/www/html/x3domlet/models/all-clean/overlook/1527257933_150165/v04/mlr32_18a/1527257933_150165-ML_DATA-32B-O-FZ0.05-MAIN.tiff'
-    scene = os.path.basename(test_corr)[:17]
-    scene_version= os.path.basename(os.path.dirname(os.path.dirname(test_corr)))
-    fname =scene+'-'+scene_version
-    img_filenameTFR = os.path.join(pathTFR,'img',fname)
-    writeTFRewcordsImageTiles(test_corr, img_filenameTFR)
-    pass
-    exit(0)
     #Parameters to generate neighbors data. Set radius to 0 to generate single-tile
-    RADIUS = 1
+    RADIUS = 2 # 5x5
     MIN_NEIBS = (2 * RADIUS + 1) * (2 * RADIUS + 1) # All tiles valid == 9
     VARIANCE_THRESHOLD = 1.5
     NUM_TRAIN_SETS = 8
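Worth noting on the retained MIN_NEIBS line: with the new RADIUS = 2 the full neighborhood is 5x5 = 25 tiles, so the trailing "All tiles valid == 9" comment is left over from the RADIUS = 1 (3x3) setting. A one-line check:

    RADIUS = 2
    MIN_NEIBS = (2 * RADIUS + 1) * (2 * RADIUS + 1)
    print(MIN_NEIBS)   # 25 for RADIUS = 2 (5x5); it is 9 only for RADIUS = 1 (3x3)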
@@ -944,6 +945,13 @@ if __name__ == "__main__":
         fpath = test_filenameTFR +("TEST_R%d_GT%4.1f"%(RADIUS,VARIANCE_THRESHOLD))
         ex_data.writeTFRewcordsEpoch(fpath, ml_list = ml_list_test, files_list = ex_data.files_test, set_ds= ex_data.test_ds, radius = RADIUS)
     plt.show()
+#    pathTFR = "/mnt/dde6f983-d149-435e-b4a2-88749245cc6c/home/eyesis/x3d_data/data_sets/tf_data_3x3b" #no trailing "/"
+#    test_corr = '/home/eyesis/x3d_data/models/var_main/www/html/x3domlet/models/all-clean/overlook/1527257933_150165/v04/mlr32_18a/1527257933_150165-ML_DATA-32B-O-FZ0.05-MAIN.tiff'
+    scene = os.path.basename(test_corr)[:17]
+    scene_version= os.path.basename(os.path.dirname(os.path.dirname(test_corr)))
+    fname =scene+'-'+scene_version
+    img_filenameTFR = os.path.join(pathTFR,'img',fname)
+    writeTFRewcordsImageTiles(test_corr, img_filenameTFR)
     pass
+    exit(0)
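The block moved after plt.show() derives the per-image TFRecords name from the test correlation path. With the test_corr value shown above it works out as follows (a worked example, not repository code):

    import os

    test_corr = ('/home/eyesis/x3d_data/models/var_main/www/html/x3domlet/models/'
                 'all-clean/overlook/1527257933_150165/v04/mlr32_18a/'
                 '1527257933_150165-ML_DATA-32B-O-FZ0.05-MAIN.tiff')
    scene         = os.path.basename(test_corr)[:17]                               # '1527257933_150165'
    scene_version = os.path.basename(os.path.dirname(os.path.dirname(test_corr)))  # 'v04'
    fname         = scene + '-' + scene_version                                    # '1527257933_150165-v04'
    # matches the files_img entry referenced below: .../img/1527257933_150165-v04.tfrecords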
@@ -31,7 +31,7 @@ FILES_PER_SCENE = 5 # number of random offset files for the scene to select f
 #MIN_BATCH_CHOICES = 10 # minimal number of tiles in a file for each bin to select from
 #MAX_BATCH_FILES = 10 #maximal number of files to use in a batch
 #MAX_EPOCH = 500
-LR = 1e-4 # learning rate
+LR = 1e-3 # learning rate
 LR100 = 1e-4
 USE_CONFIDENCE = False
 ABSOLUTE_DISPARITY = False # True # False # True # False
@@ -47,8 +47,8 @@ RUN_TOT_AVG = 100 # last batches to average. Epoch is 307 training batche
 #BATCH_SIZE = 1080//9 # == 120 Each batch of tiles has balanced D/S tiles, shuffled batches but not inside batches
 BATCH_SIZE = 2*1080//9 # == 120 Each batch of tiles has balanced D/S tiles, shuffled batches but not inside batches
 SHUFFLE_EPOCH = True
-NET_ARCH1 = 0 # 6 #0 # 4 # 3 # overwrite with argv?
-NET_ARCH2 = 0 # 6 # 0 # 3 # overwrite with argv?
+NET_ARCH1 = 0 # 2 #0 # 6 #0 # 4 # 3 # overwrite with argv?
+NET_ARCH2 = 3 # 2 #0 # 6 # 0 # 3 # overwrite with argv?
 ONLY_TILE = None # 4 # None # 0 # 4# None # (remove all but center tile data), put None here for normal operation)
 ZIP_LHVAR = True # combine _lvar and _hvar as odd/even elements
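One side effect of the retained BATCH_SIZE line: doubling the batch while keeping the old comment means the value is now 240, and the "== 120" in the comment describes the previous 1080//9 setting:

    BATCH_SIZE = 2 * 1080 // 9
    print(BATCH_SIZE)   # 240; the "== 120" comment predates the 2* factor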
@@ -331,7 +331,7 @@ except IndexError:
     train_filenameTFR1 = "/mnt/dde6f983-d149-435e-b4a2-88749245cc6c/home/eyesis/x3d_data/data_sets/tf_data/train_01.tfrecords"
 """
 files_img = ['/home/eyesis/x3d_data/data_sets/tf_data_3x3b/img/1527257933_150165-v04.tfrecords']
-result_file = '/home/eyesis/x3d_data/data_sets/tf_data_3x3b/rslt/1527257933_150165-v04R.npy'
+result_file = '/home/eyesis/x3d_data/data_sets/tf_data_3x3b/rslt/1527257933_150165-v04R-M0-3.npy'
 files_train_lvar = ["/home/eyesis/x3d_data/data_sets/tf_data_rand2/train000_R1_LE_1.5.tfrecords",
                     "/home/eyesis/x3d_data/data_sets/tf_data_rand2/train001_R1_LE_1.5.tfrecords",
@@ -364,11 +364,11 @@ import tensorflow as tf
 import tensorflow.contrib.slim as slim
-#try:
+try:
     eval_results(result_file, ABSOLUTE_DISPARITY)
-    exit(0)
+#    exit(0)
-#except:
+except:
-#    pass
+    pass
 datasets_img = []
 for fpath in files_img:
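The re-enabled try/except means evaluation of an existing result file is attempted first; if it fails (for instance the .npy results are not there yet) the script continues into the data-loading path instead of aborting, and with exit(0) now commented out it also continues after a successful evaluation. A reduced sketch of that control flow, with a placeholder eval_results and hypothetical paths:

    import numpy as np

    def eval_results(result_file, absolute_disparity):
        """Placeholder for the project's evaluation routine."""
        data = np.load(result_file)   # raises if the results file does not exist yet
        print("loaded", result_file, data.shape, "absolute disparity:", absolute_disparity)

    result_file = '/tmp/example-v04R-M0-3.npy'   # hypothetical path for this sketch
    ABSOLUTE_DISPARITY = False

    try:
        eval_results(result_file, ABSOLUTE_DISPARITY)
        # exit(0) is commented out in the commit, so execution continues either way
    except Exception:   # the commit uses a bare "except:"; Exception is the narrower equivalent
        pass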
@@ -503,7 +503,7 @@ dataset_img_size //= BATCH_SIZE
 #print_time("dataset_tt.output_types "+str(dataset_train.output_types)+", dataset_train.output_shapes "+str(dataset_train.output_shapes)+", number of elements="+str(dataset_train_size))
 dataset_tt = dataset_tt.batch(BATCH_SIZE)
-#dataset_tt = dataset_tt.prefetch(BATCH_SIZE)
+dataset_tt = dataset_tt.prefetch(BATCH_SIZE)
 iterator_tt = dataset_tt.make_initializable_iterator()
 next_element_tt = iterator_tt.get_next()
 #print("dataset_tt.output_types "+str(dataset_tt.output_types)+", dataset_tt.output_shapes "+str(dataset_tt.output_shapes)+", number of elements="+str(dataset_train_size))
...
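Enabling prefetch completes a conventional TF 1.x tf.data input pipeline: batch, prefetch, then an initializable iterator that is re-initialized at the start of each epoch. A minimal self-contained version with dummy data (shapes and sizes are illustrative only):

    import numpy as np
    import tensorflow as tf  # TF 1.x style API

    BATCH_SIZE = 240
    data = np.random.rand(1080, 25, 325).astype(np.float32)   # dummy tiles

    dataset_tt = tf.data.Dataset.from_tensor_slices(data)
    dataset_tt = dataset_tt.batch(BATCH_SIZE)
    dataset_tt = dataset_tt.prefetch(BATCH_SIZE)          # overlap host-side prep with training steps
    iterator_tt = dataset_tt.make_initializable_iterator()
    next_element_tt = iterator_tt.get_next()

    with tf.Session() as sess:
        sess.run(iterator_tt.initializer)   # re-run this to restart the epoch
        batch = sess.run(next_element_tt)
        print(batch.shape)                  # (240, 25, 325)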