Commit 9ea88926 authored by Oleg Dzhimiev

+ exportdir for pb model format saved with SavedModel

parent c5410d60
@@ -21,6 +21,7 @@ qsf.TIME_LAST = qsf.TIME_START
IMG_WIDTH = 324 # tiles per image row
DEBUG_LEVEL= 1
try:
conf_file = sys.argv[1]
except IndexError:
@@ -56,6 +57,7 @@ TRAIN_BUFFER_GPU, TRAIN_BUFFER_CPU = [None]*2
TEST_TITLES = None
USE_SPARSE_ONLY = True
LOGFILE="results-infer.txt"
"""
Next gets globals from the config file
"""
@@ -92,6 +94,11 @@ qsf.prepareFiles(dirs,
files,
suffix = SUFFIX)
"""
Next is tag for pb (pb == protocol buffer) model
"""
PB_TAG = "model_pb"
print ("Copying config files to results directory:\n ('%s' -> '%s')"%(conf_file,dirs['result'])) print ("Copying config files to results directory:\n ('%s' -> '%s')"%(conf_file,dirs['result']))
try: try:
os.makedirs(dirs['result']) os.makedirs(dirs['result'])
...@@ -122,8 +129,13 @@ try: ...@@ -122,8 +129,13 @@ try:
except: except:
pass pass
shutil.rmtree(dirs['exportdir'], ignore_errors=True)
builder = tf.saved_model.builder.SavedModelBuilder(dirs['exportdir'])
with tf.Session() as sess:
infer_saver = tf.train.import_meta_graph(files["inference"]+'.meta')
graph=tf.get_default_graph()
ph_corr2d = graph.get_tensor_by_name('ph_corr2d:0')
ph_target_disparity = graph.get_tensor_by_name('ph_target_disparity:0')
@@ -158,9 +170,9 @@ with tf.Session() as sess:
replace_nans = True,
infer = True,
keep_gt = True) # to generate same output files
img_corr2d = dataset_img['corr2d'] # (?,324)
img_target = dataset_img['target_disparity'] # (?,1)
img_ntile = dataset_img['ntile'].reshape([-1]) # (?) - 0...78k int32
#run first stage network
qsf.print_time("Running inferred model, stage1", end=" ")
_ = sess.run([stage1done],
@@ -191,6 +203,11 @@ with tf.Session() as sess:
"""
image_data[nimg] = None
# once saved, the tags can be read with saved_model_cli, which is built from the tensorflow source using bazel
builder.add_meta_graph_and_variables(sess,[PB_TAG])
if lf:
lf.close()
writer.close()
builder.save()
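
For reference, a minimal sketch (not part of this commit) of how a model exported this way could be restored in a fresh session, assuming TF 1.x; the export path below is hypothetical and would normally be dirs['exportdir'], and the available tag sets can also be listed with saved_model_cli show --dir <exportdir>:

import tensorflow as tf

EXPORT_DIR = "results/exportdir"  # hypothetical path; the script uses dirs['exportdir']
PB_TAG = "model_pb"               # must match the tag passed to add_meta_graph_and_variables()

with tf.Session(graph=tf.Graph()) as sess:
    # restores both the serialized graph (saved_model.pb) and the checkpointed variables
    tf.saved_model.loader.load(sess, [PB_TAG], EXPORT_DIR)
    # tensors keep the names they had at export time
    ph_corr2d = sess.graph.get_tensor_by_name('ph_corr2d:0')
    ph_target_disparity = sess.graph.get_tensor_by_name('ph_target_disparity:0')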
@@ -129,7 +129,13 @@ def prepareFiles(dirs, files, suffix):
files['inference'] = 'inference'
if not 'inference' in dirs:
dirs['inference'] = dirs['result']
files['inference'] = os.path.join(dirs['inference'], files['inference'])
if not 'exportdir' in files:
files['exportdir'] = 'exportdir'
if not 'exportdir' in dirs:
dirs['exportdir'] = dirs['result']
files['exportdir'] = os.path.join(dirs['exportdir'], files['exportdir'])
if not 'figures' in dirs:
dirs['figures'] = os.path.join(dirs['result'],"figs")
...
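
For illustration only, with an assumed result directory: the exportdir fallback added above mirrors the existing 'inference' handling, so a config that never mentions exportdir still resolves to a path under the results directory.

import os

dirs  = {'result': 'results'}   # hypothetical value; normally taken from the config file
files = {}

# same fallback logic as the prepareFiles() change above
if not 'exportdir' in files:
    files['exportdir'] = 'exportdir'
if not 'exportdir' in dirs:
    dirs['exportdir'] = dirs['result']
files['exportdir'] = os.path.join(dirs['exportdir'], files['exportdir'])

print(files['exportdir'])  # -> results/exportdir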