Commit 5bc70b70 authored by Oleg Dzhimiev

cleanup

parent 5414fe8c
@@ -151,7 +151,7 @@ rv_stage1_out = tf.get_variable("rv_stage1_out",
                                 shape=[HEIGHT * WIDTH, NN_LAYOUT1[-1]],
                                 dtype=tf.float32,
                                 initializer=tf.zeros_initializer,
-                                collections = [GraphKeys.LOCAL_VARIABLES], trainable=False)
+                                collections = [GraphKeys.LOCAL_VARIABLES],trainable=False)
 '''
 #rv_stageX_out_init_placeholder = tf.placeholder(tf.float32, shape=[HEIGHT * WIDTH, NN_LAYOUT1[-1]])
@@ -267,10 +267,16 @@ with tf.Session() as sess:
     saver.restore(sess, files["checkpoints"])
-    #tf.add_to_collection(GraphKeys.GLOBAL_VARIABLES,rv_stage1_out)
+    '''
+    rv_stage1_out belongs to GraphKeys.LOCAL_VARIABLES.
+    Now, when weights/biases are restored from 'checkpoints'
+    that do not have this variable, add it to globals.
+    Actually it could have been declared right here - this
+    needs testing.
+    '''
+    tf.add_to_collection(GraphKeys.GLOBAL_VARIABLES, rv_stage1_out)
-    saver.save(sess, files["inference"]) #TODO: move to different subdir
-    #saver2.save(sess, files["inference"]+"_2") #TODO: move to different subdir
+    saver.save(sess, files["inference"])
     merged = tf.summary.merge_all()
     writer = tf.summary.FileWriter(ROOT_PATH, sess.graph)
@@ -329,8 +335,9 @@ with tf.Session() as sess:
     Remove dataset_img (if it is not [0]) to reduce memory footprint
     """
     image_data[nimg] = None
-    meta_graph_def = tf.train.export_meta_graph(files["inference"]+'.meta')
+    # is this needed? why would it be?
+    #meta_graph_def = tf.train.export_meta_graph(files["inference"]+'.meta')
     if lf:
...
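The docstring added above is the crux of this file's change: `rv_stage1_out` lives in `GraphKeys.LOCAL_VARIABLES`, and a `tf.train.Saver` built with the default `var_list` captures `GLOBAL_VARIABLES` at construction time, so the variable is invisible to it until it is added to the global collection. A minimal sketch of the mechanism, with made-up variable names and a placeholder path (TensorFlow 1.x):

```python
import os
import tensorflow as tf  # TensorFlow 1.x

w = tf.get_variable("w", shape=[2], initializer=tf.zeros_initializer)   # global
rv = tf.get_variable("rv_demo", shape=[4], initializer=tf.zeros_initializer,
                     collections=[tf.GraphKeys.LOCAL_VARIABLES], trainable=False)

saver_old = tf.train.Saver()  # var_list is captured here: only 'w'
tf.add_to_collection(tf.GraphKeys.GLOBAL_VARIABLES, rv)
saver_new = tf.train.Saver()  # built after the add: saves 'w' and 'rv_demo'

os.makedirs("/tmp/rv_demo", exist_ok=True)
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    sess.run(tf.local_variables_initializer())
    saver_new.save(sess, "/tmp/rv_demo/model")  # checkpoint now includes rv_demo
```

One caveat worth noting: a Saver constructed before the `add_to_collection` call keeps its original `var_list`; whether the commit's `saver` is built before or after the add is not visible in this hunk.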
@@ -265,13 +265,15 @@ with tf.Session() as sess:
     sess.run(tf.global_variables_initializer())
     sess.run(tf.local_variables_initializer())
-    saver.restore(sess, files["checkpoints"])
-    #tf.add_to_collection(GraphKeys.GLOBAL_VARIABLES,rv_stage1_out)
-    saver.save(sess, files["inference"]) #TODO: move to different subdir
-    #saver2.save(sess, files["inference"]+"_2") #TODO: move to different subdir
+    # so, here I need to restore from inference and save to inference_global
+    #saver.restore(sess, files["checkpoints"])
+    saver.restore(sess, files["inference"])
+    # now add to global
+    tf.add_to_collection(GraphKeys.GLOBAL_VARIABLES, rv_stage1_out)
+    saver.save(sess, 'data_sets/tf_data_5x5_main_13_heur/inference_global/model')
     merged = tf.summary.merge_all()
     writer = tf.summary.FileWriter(ROOT_PATH, sess.graph)
     lf = None
...
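This hunk writes a second checkpoint family under `inference_global/`. Assuming the default `write_meta_graph=True`, a later consumer script (the "extra script to restore from it" mentioned in the next file's comment) could reload it in a fresh graph roughly as below; the tensor name `rv_stage1_out:0` is an assumption based on the variable name in the first file, not taken from the repo:

```python
import tensorflow as tf  # TensorFlow 1.x

ckpt = 'data_sets/tf_data_5x5_main_13_heur/inference_global/model'
with tf.Session() as sess:
    loader = tf.train.import_meta_graph(ckpt + '.meta')  # rebuild the graph
    loader.restore(sess, ckpt)                           # then load the values
    # 'rv_stage1_out:0' is assumed from the variable name; verify in the graph
    rv = tf.get_default_graph().get_tensor_by_name('rv_stage1_out:0')
    print(sess.run(rv).shape)
```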
@@ -129,31 +129,29 @@ try:
 except:
     pass
+from tensorflow.python.framework.ops import GraphKeys
 with tf.Session() as sess:
-    # default option
-    use_saved_model = False
-    if os.path.isdir(dirs['exportdir']):
-        # check if dir contains "Saved Model" model
-        use_saved_model = tf.saved_model.loader.maybe_saved_model_directory(dirs['exportdir'])
-    if use_saved_model:
-        print("Model restore: using Saved_Model model MetaGraph protocol buffer")
-        meta_graph_source = tf.saved_model.loader.load(sess, [tf.saved_model.tag_constants.SERVING], dirs['exportdir'])
-    else:
-        print("Model restore: using conventionally saved model, but saving Saved Model for the next run")
-        meta_graph_source = files["inference"]+'.meta'
-    print("MetaGraph source = "+str(meta_graph_source))
-    # remove 'exportdir' even if it exists and has anything in it
-    shutil.rmtree(dirs['exportdir'], ignore_errors=True)
-    builder = tf.saved_model.builder.SavedModelBuilder(dirs['exportdir'])
+    # Actually, refresh all the time and have an extra script to restore from it.
+    # use_saved_model = False
+    #if os.path.isdir(dirs['exportdir']):
+    #    # check if dir contains "Saved Model" model
+    #    use_saved_model = tf.saved_model.loader.maybe_saved_model_directory(dirs['exportdir'])
+    #if use_saved_model:
+    #    print("Model restore: using Saved_Model model MetaGraph protocol buffer")
+    #    meta_graph_source = tf.saved_model.loader.load(sess, [tf.saved_model.tag_constants.SERVING], dirs['exportdir'])
+    #else:
+    meta_graph_source = files["inference"]+'.meta'
+    #meta_graph_source = files["inference"]+'_2.meta'
+    print("Model restore: using conventionally saved model, but saving Saved Model for the next run")
+    print("MetaGraph source = "+str(meta_graph_source))
     infer_saver = tf.train.import_meta_graph(meta_graph_source)
     graph = tf.get_default_graph()
     ph_corr2d = graph.get_tensor_by_name('ph_corr2d:0')
     ph_target_disparity = graph.get_tensor_by_name('ph_target_disparity:0')
     ph_ntile = graph.get_tensor_by_name('ph_ntile:0')
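For context on the branch being disabled above: `maybe_saved_model_directory()` only checks whether the directory contains a `saved_model.pb` or `saved_model.pbtxt`, while `loader.load()` imports the MetaGraphDef and restores the variables in a single call, which is why that path needed no separate `Saver.restore()`. A minimal sketch of that disabled path, with a placeholder directory standing in for `dirs['exportdir']`:

```python
import tensorflow as tf  # TensorFlow 1.x

export_dir = '/tmp/demo_export'  # placeholder for dirs['exportdir']
with tf.Session() as sess:
    if tf.saved_model.loader.maybe_saved_model_directory(export_dir):
        # load() imports the MetaGraphDef *and* restores the variables,
        # so no separate Saver.restore() is needed on this path
        meta_graph_def = tf.saved_model.loader.load(
            sess, [tf.saved_model.tag_constants.SERVING], export_dir)
```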
@@ -164,27 +162,17 @@ with tf.Session() as sess:
     if not USE_SPARSE_ONLY: # Does it reduce the graph size?
         stage2_out_full = graph.get_tensor_by_name('Disparity_net/stage2_out_full:0')
-    '''
-    if not use_saved_model:
-        rv_stage1_out = tf.get_variable("rv_stage1_out",
-                                        shape=[78408, 32],
-                                        dtype=tf.float32,
-                                        initializer=tf.zeros_initializer)
-                                        #collections = [GraphKeys.LOCAL_VARIABLES],trainable=False)
-    '''
     sess.run(tf.global_variables_initializer())
     sess.run(tf.local_variables_initializer())
-    if not use_saved_model:
-        infer_saver.restore(sess, files["inference"]) # after initializers, of course
-    else:
-        infer_saver.restore(sess, dirs['exportdir']+"/variables/variables.data-00000-of-00001")
-    #infer_saver.restore(sess, files["inference"]+"_2") # after initializers, of course
+    infer_saver.restore(sess, files["inference"]) # after initializers, of course
     merged = tf.summary.merge_all()
     writer = tf.summary.FileWriter(ROOT_PATH, sess.graph)
     lf = None
     if LOGPATH:
         lf = open(LOGPATH, "w") # overwrite previous (or make it "a"?)
@@ -234,11 +222,14 @@ with tf.Session() as sess:
     """
     image_data[nimg] = None
-    if not use_saved_model:
-        #builder.add_meta_graph_and_variables(sess,PB_TAGS)
-        builder.add_meta_graph_and_variables(sess,[tf.saved_model.tag_constants.SERVING])
-        builder.save(True)
-        #builder.save(False)
+    #builder.add_meta_graph_and_variables(sess,PB_TAGS)
+    # clean
+    shutil.rmtree(dirs['exportdir'], ignore_errors=True)
+    # save MetaGraph to Saved_Model as *.pb
+    builder = tf.saved_model.builder.SavedModelBuilder(dirs['exportdir'])
+    builder.add_meta_graph_and_variables(sess, [tf.saved_model.tag_constants.SERVING])
+    #builder.save(True)
+    builder.save(False) # True = *.pbtxt, False = *.pb
     if lf:
         lf.close()
...
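Taken together, the new flow in this file is: import the conventional `.meta`, restore, then wipe `exportdir` and rebuild the SavedModel every run. A self-contained round trip of the builder/loader pair used above, with a placeholder path and a dummy variable (nothing here is taken from the repo):

```python
import shutil
import tensorflow as tf  # TensorFlow 1.x

export_dir = '/tmp/demo_export'  # placeholder
shutil.rmtree(export_dir, ignore_errors=True)

# export: save(as_text=False) writes <export_dir>/saved_model.pb plus variables/
with tf.Graph().as_default(), tf.Session() as sess:
    x = tf.get_variable("x", shape=[2], initializer=tf.ones_initializer)
    sess.run(tf.global_variables_initializer())
    builder = tf.saved_model.builder.SavedModelBuilder(export_dir)
    builder.add_meta_graph_and_variables(
        sess, [tf.saved_model.tag_constants.SERVING])
    builder.save(False)  # binary saved_model.pb (True would write *.pbtxt)

# reload: graph and variable values come back in one call
with tf.Graph().as_default(), tf.Session() as sess:
    tf.saved_model.loader.load(
        sess, [tf.saved_model.tag_constants.SERVING], export_dir)
    print(sess.run(tf.get_default_graph().get_tensor_by_name('x:0')))
```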