Commit 6e9382a3 authored by Oleg Dzhimiev

added tag_constant

parent 6e3f3149
@@ -219,8 +219,11 @@ tf.add_to_collection(collection_io, stage1done)
 tf.add_to_collection(collection_io, stage2_out_sparse)
 """
 ##saver=tf.train.Saver()
-saver=tf.train.Saver(tf.global_variables())
+saver =tf.train.Saver(tf.global_variables())
+#saver2 =tf.train.Saver(tf.global_variables()+tf.local_variables())
 saver_def = saver.as_saver_def()
 pass
 """
 saver_def = saver.as_saver_def()
@@ -250,7 +253,10 @@ with tf.Session() as sess:
 sess.run(tf.global_variables_initializer())
 sess.run(tf.local_variables_initializer())
 saver.restore(sess, files["checkpoints"])
 saver.save(sess, files["inference"]) #TODO: move to different subdir
+#saver2.save(sess, files["inference"]+"_2") #TODO: move to different subdir
 merged = tf.summary.merge_all()
 writer = tf.summary.FileWriter(ROOT_PATH, sess.graph)
 lf = None
...
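The first file keeps a single tf.train.Saver built over tf.global_variables() (the saver2 variant that would also include local variables stays commented out) and re-saves the restored training checkpoint under an inference name. Below is a minimal, self-contained sketch of that pattern under TensorFlow 1.x; it is not the project's code, and the paths "ckpt/train" and "ckpt/inference" are placeholders for files["checkpoints"] and files["inference"].

# Sketch only: save a training checkpoint, restore it, and re-save it for inference.
import tensorflow as tf

x = tf.placeholder(tf.float32, shape=[None, 4], name="x")
w = tf.get_variable("w", shape=[4, 1])
y = tf.matmul(x, w, name="y")

saver = tf.train.Saver(tf.global_variables())   # same construction as in the hunk above

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    sess.run(tf.local_variables_initializer())
    saver.save(sess, "ckpt/train")        # stands in for files["checkpoints"]
    saver.restore(sess, "ckpt/train")     # restore only after the initializers have run
    saver.save(sess, "ckpt/inference")    # stands in for files["inference"]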
@@ -97,7 +97,7 @@ qsf.prepareFiles(dirs,
 """
 Next is tag for pb (pb == protocol buffer) model
 """
-PB_TAGS = ["model_pb"]
+#PB_TAGS = ["model_pb"]
 print ("Copying config files to results directory:\n ('%s' -> '%s')"%(conf_file,dirs['result']))
 try:
@@ -139,10 +139,12 @@ with tf.Session() as sess:
 if use_saved_model:
 print("Model restore: using Saved_Model model MetaGraph protocol buffer")
-meta_graph_source = tf.saved_model.loader.load(sess, PB_TAGS, dirs['exportdir'])
+meta_graph_source = tf.saved_model.loader.load(sess, [tf.saved_model.tag_constants.SERVING], dirs['exportdir'])
 else:
 print("Model restore: using conventionally saved model, but saving Saved Model for the next run")
 meta_graph_source = files["inference"]+'.meta'
+print("MetaGraph source = "+str(meta_graph_source))
+#meta_graph_source = files["inference"]+'_2.meta'
 # remove 'exportdir' even it exsits and has anything
 shutil.rmtree(dirs['exportdir'], ignore_errors=True)
 builder = tf.saved_model.builder.SavedModelBuilder(dirs['exportdir'])
@@ -164,6 +166,7 @@ with tf.Session() as sess:
 sess.run(tf.local_variables_initializer())
 infer_saver.restore(sess, files["inference"]) # after initializers, of course
+#infer_saver.restore(sess, files["inference"]+"_2") # after initializers, of course
 merged = tf.summary.merge_all()
 writer = tf.summary.FileWriter(ROOT_PATH, sess.graph)
@@ -217,8 +220,10 @@ with tf.Session() as sess:
 image_data[nimg] = None
 if not use_saved_model:
-builder.add_meta_graph_and_variables(sess,PB_TAGS)
-builder.save()
+#builder.add_meta_graph_and_variables(sess,PB_TAGS)
+builder.add_meta_graph_and_variables(sess,[tf.saved_model.tag_constants.SERVING])
+#builder.save(True)
+builder.save(False)
 if lf:
 lf.close()
...
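The second file is where the commit name comes from: the ad-hoc PB_TAGS list is replaced by the standard tf.saved_model.tag_constants.SERVING tag both when the SavedModel is exported (SavedModelBuilder.add_meta_graph_and_variables) and when it is loaded back (tf.saved_model.loader.load), and builder.save(False) writes the binary saved_model.pb. Below is a minimal, self-contained sketch of that round trip under TensorFlow 1.x; it is not the project's code, and export_dir is a placeholder for dirs['exportdir'].

# Sketch only: export a graph under the SERVING tag, then load it back with the same tag list.
import shutil
import tensorflow as tf

export_dir = "export_dir"   # placeholder for dirs['exportdir']

x = tf.placeholder(tf.float32, shape=[None, 4], name="x")
w = tf.get_variable("w", shape=[4, 1])
y = tf.matmul(x, w, name="y")

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # SavedModelBuilder requires a non-existing directory, hence the rmtree.
    shutil.rmtree(export_dir, ignore_errors=True)
    builder = tf.saved_model.builder.SavedModelBuilder(export_dir)
    builder.add_meta_graph_and_variables(sess, [tf.saved_model.tag_constants.SERVING])
    builder.save(False)   # as_text=False -> binary saved_model.pb

# Load in a fresh graph/session, using the same tag list as at export time.
tf.reset_default_graph()
with tf.Session() as sess:
    meta_graph = tf.saved_model.loader.load(
        sess, [tf.saved_model.tag_constants.SERVING], export_dir)

loader.load raises an error if no MetaGraphDef in the SavedModel carries exactly the requested tags, which is why the commit swaps PB_TAGS for the serving tag on both the export and the import side rather than in one place only.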