Commit 3a68b363 authored by Jennifer Villa's avatar Jennifer Villa
Browse files

Adding knob to export_inference_graph.py to allow users to specify

batch size for exported model. Defaults to None to allow for flexible
batching.
parent 6d140f13
...@@ -75,6 +75,11 @@ tf.app.flags.DEFINE_integer( ...@@ -75,6 +75,11 @@ tf.app.flags.DEFINE_integer(
'image_size', None, 'image_size', None,
'The image size to use, otherwise use the model default_image_size.') 'The image size to use, otherwise use the model default_image_size.')
# Optional fixed batch size for the exported graph's input placeholder.
# Left as None by default so the batch dimension stays unspecified in the
# placeholder shape, letting callers pick the batch size at model runtime.
tf.app.flags.DEFINE_integer(
'batch_size', None,
'Batch size for the exported model. Defaulted to "None" so batch size can '
'be specified at model runtime.')
tf.app.flags.DEFINE_string('dataset_name', 'imagenet', tf.app.flags.DEFINE_string('dataset_name', 'imagenet',
'The name of the dataset to use with the model.') 'The name of the dataset to use with the model.')
...@@ -106,7 +111,8 @@ def main(_): ...@@ -106,7 +111,8 @@ def main(_):
is_training=FLAGS.is_training) is_training=FLAGS.is_training)
image_size = FLAGS.image_size or network_fn.default_image_size image_size = FLAGS.image_size or network_fn.default_image_size
placeholder = tf.placeholder(name='input', dtype=tf.float32, placeholder = tf.placeholder(name='input', dtype=tf.float32,
shape=[1, image_size, image_size, 3]) shape=[FLAGS.batch_size, image_size,
image_size, 3])
network_fn(placeholder) network_fn(placeholder)
graph_def = graph.as_graph_def() graph_def = graph.as_graph_def()
with gfile.GFile(FLAGS.output_file, 'wb') as f: with gfile.GFile(FLAGS.output_file, 'wb') as f:
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment