                        exported inference graph frozen?
Hi, I'm trying to convert the inference graph exported by inference_graph_exporter.InferenceGraphExporter, together with a checkpoint, into a SavedModel for TensorFlow Serving. I followed the code in predictor.py; here's my code:
import sys

import tensorflow as tf
from lingvo import model_imports
from lingvo import model_registry
from lingvo.core import inference_graph_exporter

if len(sys.argv) not in (3, 4):
    print("Usage: %s orig_ckpt_dir dest_ckpt [graph.pb]" % sys.argv[0])
    sys.exit(1)
orig_ckpt_dir = sys.argv[1]
dest_ckpt = sys.argv[2]
graph_path = sys.argv[3] if len(sys.argv) == 4 else None

model_registry.FLAGS(sys.argv)

checkpoint = tf.train.latest_checkpoint(orig_ckpt_dir)
print('Using checkpoint %s' % checkpoint)

# Build and export the inference graph for the registered model.
params = model_registry.GetParams('asr.librispeech.Librispeech960Wpm', 'Test')
device_opt = inference_graph_exporter.InferenceDeviceOptions(
    device="", retain_device_placement=False, var_options=None,
    gen_init_op=False, dtype_override=None)
inference_graph = inference_graph_exporter.InferenceGraphExporter.Export(
    params, device_options=device_opt, export_path=graph_path)

# Import the inference graph and build a saver from its saver_def.
graph = tf.Graph()
with graph.as_default():
    saver = tf.train.Saver(saver_def=inference_graph.saver_def)
    tf.import_graph_def(inference_graph.graph_def, name="")
    # graph.finalize()

# Restore the weights from the training checkpoint and re-save them
# alongside the imported inference graph.
with tf.Session(graph=graph) as sess:
    sess.run(graph.get_operation_by_name("init_all_tables"))
    saver.restore(sess, checkpoint)
    print(tf.trainable_variables())
    saver.save(sess, dest_ckpt)
    print("Saving done")
I then used the code below to convert the newly saved checkpoint to a SavedModel:
import sys

import tensorflow as tf
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.saved_model import tag_constants

sys.path.append('/tmp/lingvo')
from lingvo.core.ops import py_x_ops  # registers lingvo's custom ops

if len(sys.argv) != 3:
    print("Usage: %s ckpt saved_dir" % sys.argv[0])
    sys.exit(1)
check_point = sys.argv[1]
export_dir = sys.argv[2]

builder = tf.saved_model.builder.SavedModelBuilder(export_dir)
sigs = {}
with tf.Session(graph=tf.Graph()) as sess:
    # Rebuild the graph from the checkpoint's meta file and restore the weights.
    saver = tf.train.import_meta_graph(check_point + ".meta")
    saver.restore(sess, check_point)
    graph = tf.get_default_graph()
    input_audio = graph.get_tensor_by_name('inference/default/wav:0')
    output_hyps = graph.get_tensor_by_name('inference/default/Reshape_7:0')
    sigs[signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY] = \
        tf.saved_model.signature_def_utils.predict_signature_def(
            {"in": input_audio}, {"out": output_hyps})
    builder.add_meta_graph_and_variables(
        sess, [tag_constants.SERVING], signature_def_map=sigs)
builder.save()
The output SavedModel has an empty 'variables' directory. After some googling, it seems this happens when the inference graph is frozen. But I'm sure _FreezeGraphFromCheckpoint and _FreezeDefaults are not used, and I checked both the output checkpoint's meta file and the inference graph pb file: both contain VariableV2 ops, so I think the model is not frozen. Any idea why? Any suggestions would be appreciated.
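For reference, the check I ran is essentially the following: it parses the MetaGraphDef directly and counts ops (the same loop also works on inference_graph.graph_def from the first script; the meta path is a placeholder):

import sys
from tensorflow.core.protobuf import meta_graph_pb2

meta_path = sys.argv[1]  # e.g. the .meta file next to the re-saved checkpoint

# Parse the MetaGraphDef and count the ops that show whether the graph
# still holds real variables or has been frozen into constants.
meta_graph_def = meta_graph_pb2.MetaGraphDef()
with open(meta_path, "rb") as f:
    meta_graph_def.ParseFromString(f.read())

graph_def = meta_graph_def.graph_def
print("VariableV2 nodes:", sum(1 for n in graph_def.node if n.op == "VariableV2"))
print("Const nodes:", sum(1 for n in graph_def.node if n.op == "Const"))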
Hi there,
Could you try restoring this way, using the saver_def from the first snippet?
saver = tf.train.Saver(saver_def=inference_graph.saver_def)
saver.restore(sess, check_point)
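Roughly like this, as an untested sketch (it reuses inference_graph, checkpoint, and export_dir from your two scripts and keeps the saver_def restore and the SavedModel export in the same session; I haven't verified whether this fills the variables directory):

import tensorflow as tf
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.saved_model import tag_constants

# Import the exported inference graph and build a saver from its saver_def.
graph = tf.Graph()
with graph.as_default():
    tf.import_graph_def(inference_graph.graph_def, name="")
    saver = tf.train.Saver(saver_def=inference_graph.saver_def)

builder = tf.saved_model.builder.SavedModelBuilder(export_dir)
with tf.Session(graph=graph) as sess:
    # Initialize tables and restore the weights from the original checkpoint.
    sess.run(graph.get_operation_by_name("init_all_tables"))
    saver.restore(sess, checkpoint)
    input_audio = graph.get_tensor_by_name('inference/default/wav:0')
    output_hyps = graph.get_tensor_by_name('inference/default/Reshape_7:0')
    sig = tf.saved_model.signature_def_utils.predict_signature_def(
        {"in": input_audio}, {"out": output_hyps})
    builder.add_meta_graph_and_variables(
        sess, [tag_constants.SERVING],
        signature_def_map={signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: sig})
builder.save()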