Commit d7dc6bd3 authored by Oleg Dzhimiev

more testing

parent 6e9382a3
@@ -16,7 +16,6 @@ import qcstereo_functions as qsf
 import tensorflow as tf
 from tensorflow.python.ops import resource_variable_ops
 tf.ResourceVariable = resource_variable_ops.ResourceVariable
 qsf.TIME_START = time.time()
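
A note on the `tf.ResourceVariable` alias kept in this hunk: on the TF 1.x builds this code targets, `ResourceVariable` is not always exported at the top level, so it is pulled from `tensorflow.python.ops`. A minimal sketch of what the alias enables (assuming TF 1.x; not code from this repo):

```python
# Sketch, assuming TensorFlow 1.x: expose ResourceVariable at the top level
# when the installed build does not export it, so later isinstance() checks
# against tf.ResourceVariable keep working.
import tensorflow as tf
from tensorflow.python.ops import resource_variable_ops

if not hasattr(tf, 'ResourceVariable'):
    tf.ResourceVariable = resource_variable_ops.ResourceVariable

v = tf.get_variable("v", shape=[2, 2], use_resource=True)
print(isinstance(v, tf.ResourceVariable))  # True for resource variables
```
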
@@ -146,6 +145,18 @@ rv_stage1_out = tf.Variable(
         collections = [GraphKeys.LOCAL_VARIABLES],# Works, available with tf.local_variables()
         dtype=np.float32,
         name = 'rv_stage1_out')
+'''
+rv_stage1_out = tf.get_variable("rv_stage1_out",
+            shape=[HEIGHT * WIDTH, NN_LAYOUT1[-1]],
+            dtype=tf.float32,
+            initializer=tf.zeros_initializer,
+            collections = [GraphKeys.LOCAL_VARIABLES], trainable=False)
+'''
+#rv_stageX_out_init_placeholder = tf.placeholder(tf.float32, shape=[HEIGHT * WIDTH, NN_LAYOUT1[-1]])
+#rv_stageX_out_init_op = rv_stageX_out.assign(rv_stageX_out_init_placeholder)
 ##stage1_tiled = tf.reshape(rv_stage1_out.read_value(),[HEIGHT, WIDTH, -1], name = 'stage1_tiled')
 stage1_tiled = tf.reshape(rv_stage1_out, [HEIGHT, WIDTH, -1], name = 'stage1_tiled') # no need to synchronize here?
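
The commented-out `tf.get_variable` variant added in this hunk registers `rv_stage1_out` only in `GraphKeys.LOCAL_VARIABLES`. A small sketch of the consequences, with illustrative names and shapes (not from this repo):

```python
# Sketch (TF 1.x): a variable registered only in LOCAL_VARIABLES is invisible
# to tf.global_variables(), so a default tf.train.Saver() will not checkpoint
# it, and it needs tf.local_variables_initializer() before first use.
import tensorflow as tf
from tensorflow.python.framework.ops import GraphKeys

buf = tf.get_variable("buf", shape=[4, 8], dtype=tf.float32,
                      initializer=tf.zeros_initializer,
                      collections=[GraphKeys.LOCAL_VARIABLES],
                      trainable=False)
tiled = tf.reshape(buf, [2, 2, -1])  # reads the variable, as stage1_tiled does

with tf.Session() as sess:
    sess.run(tf.local_variables_initializer())  # the global initializer skips buf
    print(sess.run(tiled).shape)                # (2, 2, 8)
```
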
@@ -220,7 +231,7 @@ tf.add_to_collection(collection_io, stage2_out_sparse)
 """
 ##saver=tf.train.Saver()
 saver =tf.train.Saver(tf.global_variables())
-#saver2 =tf.train.Saver(tf.global_variables()+tf.local_variables())
+#saver = tf.train.Saver(tf.global_variables()+tf.local_variables())
 saver_def = saver.as_saver_def()
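
On the `saver` / `saver2` line changed above: the `var_list` handed to `tf.train.Saver` decides what lands in the checkpoint. A hedged sketch with stand-in variables:

```python
# Sketch (TF 1.x): Saver(tf.global_variables()) checkpoints globals only;
# adding tf.local_variables() would also capture LOCAL_VARIABLES entries
# such as rv_stage1_out.
import tensorflow as tf

w = tf.get_variable("w", shape=[3], initializer=tf.zeros_initializer)
scratch = tf.get_variable("scratch", shape=[3],
                          collections=[tf.GraphKeys.LOCAL_VARIABLES])

saver_globals = tf.train.Saver(tf.global_variables())                     # w only
saver_all = tf.train.Saver(tf.global_variables() + tf.local_variables())  # w + scratch
print([v.op.name for v in tf.global_variables()])  # ['w']
print([v.op.name for v in tf.local_variables()])   # ['scratch']
```
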
@@ -250,9 +261,13 @@ except:
 with tf.Session() as sess:
     sess.run(tf.global_variables_initializer())
     sess.run(tf.local_variables_initializer())
     saver.restore(sess, files["checkpoints"])
+    #tf.add_to_collection(GraphKeys.GLOBAL_VARIABLES,rv_stage1_out)
     saver.save(sess, files["inference"]) #TODO: move to different subdir
     #saver2.save(sess, files["inference"]+"_2") #TODO: move to different subdir
@@ -263,6 +278,8 @@ with tf.Session() as sess:
     if LOGPATH:
         lf=open(LOGPATH,"w") #overwrite previous (or make it "a"?
+    #_ = sess.run([rv_stageX_out_init_op],feed_dict={rv_stageX_out_init_placeholder: np.zeros((HEIGHT * WIDTH, NN_LAYOUT1[-1]))})
     for nimg,_ in enumerate(image_data):
         dataset_img = qsf.readImageData(
             image_data = image_data,
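
The commented-out `rv_stageX_out_init_op` run above pairs with the placeholder/assign lines added earlier in this commit. The idea, sketched with hypothetical sizes standing in for `HEIGHT`, `WIDTH`, and `NN_LAYOUT1[-1]`:

```python
# Sketch (TF 1.x): feeding a placeholder into an assign op is a common way to
# load arbitrary numpy data into a variable without embedding it in the graph
# as a large constant.
import numpy as np
import tensorflow as tf

H, W, C = 100, 100, 32  # hypothetical stand-ins for HEIGHT, WIDTH, NN_LAYOUT1[-1]
rv = tf.get_variable("rv", shape=[H * W, C], dtype=tf.float32,
                     initializer=tf.zeros_initializer, trainable=False)
init_ph = tf.placeholder(tf.float32, shape=[H * W, C])
init_op = rv.assign(init_ph)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    _ = sess.run([init_op], feed_dict={init_ph: np.zeros((H * W, C), np.float32)})
```
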
@@ -296,7 +313,7 @@ with tf.Session() as sess:
         try:
             os.makedirs(os.path.dirname(result_file))
         except:
             pass
         rslt = np.concatenate(
             [disp_out.reshape(-1,1),
...
@@ -129,6 +129,8 @@ try:
 except:
     pass
+from tensorflow.python.framework.ops import GraphKeys
 with tf.Session() as sess:
     # default option
@@ -162,10 +164,23 @@ with tf.Session() as sess:
     if not USE_SPARSE_ONLY: #Does it reduce the graph size?
         stage2_out_full = graph.get_tensor_by_name('Disparity_net/stage2_out_full:0')
+    '''
+    if not use_saved_model:
+        rv_stage1_out = tf.get_variable("rv_stage1_out",
+            shape=[78408, 32],
+            dtype=tf.float32,
+            initializer=tf.zeros_initializer)
+            #collections = [GraphKeys.LOCAL_VARIABLES],trainable=False)
+    '''
     sess.run(tf.global_variables_initializer())
     sess.run(tf.local_variables_initializer())
-    infer_saver.restore(sess, files["inference"]) # after initializers, of course
+    if not use_saved_model:
+        infer_saver.restore(sess, files["inference"]) # after initializers, of course
+    else:
+        infer_saver.restore(sess, dirs['exportdir']+"/variables/variables.data-00000-of-00001")
     #infer_saver.restore(sess, files["inference"]+"_2") # after initializers, of course
     merged = tf.summary.merge_all()
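
One caveat about the new `else` branch: `tf.train.Saver.restore()` expects the checkpoint *prefix*, not an individual shard file, so pointing it at `variables.data-00000-of-00001` will likely raise an error. A sketch of the usual form (the path is illustrative, standing in for `dirs['exportdir']`):

```python
# Sketch (TF 1.x): restore SavedModel variables via the checkpoint prefix.
# The .data-00000-of-00001 and .index files are shards behind that prefix.
import os

exportdir = "/path/to/exportdir"  # hypothetical stand-in for dirs['exportdir']
ckpt_prefix = os.path.join(exportdir, "variables", "variables")
# infer_saver.restore(sess, ckpt_prefix)
```
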
@@ -222,8 +237,8 @@ with tf.Session() as sess:
     if not use_saved_model:
         #builder.add_meta_graph_and_variables(sess,PB_TAGS)
         builder.add_meta_graph_and_variables(sess,[tf.saved_model.tag_constants.SERVING])
-        #builder.save(True)
-        builder.save(False)
+        builder.save(True)
+        #builder.save(False)
     if lf:
         lf.close()
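
The flipped `builder.save(...)` lines switch the SavedModel export from binary to text serialization. A self-contained sketch of the difference (paths and variables are illustrative):

```python
# Sketch (TF 1.x): SavedModelBuilder.save(as_text=True) writes a human-readable
# saved_model.pbtxt; save() / save(False) writes the binary saved_model.pb.
import tensorflow as tf

export_dir = "/tmp/export_demo"  # hypothetical; must not already exist
builder = tf.saved_model.builder.SavedModelBuilder(export_dir)

with tf.Session() as sess:
    v = tf.get_variable("v", shape=[1], initializer=tf.zeros_initializer)
    sess.run(tf.global_variables_initializer())
    builder.add_meta_graph_and_variables(
        sess, [tf.saved_model.tag_constants.SERVING])

builder.save(as_text=True)  # -> export_dir/saved_model.pbtxt
```
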
...