ModelZoo / ResNet50_tensorflow · Commits · 213125e3

Commit 213125e3, authored Jul 18, 2017 by Derek Chow (parent 4f14cb62)

    Change exporter interface and give it a few new abilities.
Showing 3 changed files with 297 additions and 245 deletions (+297, -245):
  object_detection/export_inference_graph.py   +30  -28
  object_detection/exporter.py                 +137 -131
  object_detection/exporter_test.py            +130 -86
object_detection/export_inference_graph.py (view file @ 213125e3)
...
@@ -16,8 +16,9 @@
 r"""Tool to export an object detection model for inference.

 Prepares an object detection tensorflow graph for inference using model
-configuration and an optional trained checkpoint. Outputs either an inference
-graph or a SavedModel (https://tensorflow.github.io/serving/serving_basic.html).
+configuration and an optional trained checkpoint. Outputs inference
+graph, associated checkpoint files, a frozen inference graph and a
+SavedModel (https://tensorflow.github.io/serving/serving_basic.html).

 The inference graph contains one of three input nodes depending on the user
 specified option.
...
@@ -41,23 +42,28 @@ and the following output nodes returned by the model.postprocess(..):
   masks for each box if its present in the dictionary of postprocessed
   tensors returned by the model.

-Note that currently `batch` is always 1, but we will support `batch` > 1 in
-the future.
-
-Optionally, one can freeze the graph by converting the weights in the provided
-checkpoint as graph constants thereby eliminating the need to use a checkpoint
-file during inference.
-
-Note that this tool uses `use_moving_averages` from eval_config to decide
-which weights to freeze.
+Notes:
+ * Currently `batch` is always 1, but we will support `batch` > 1 in the future.
+ * This tool uses `use_moving_averages` from eval_config to decide which
+   weights to freeze.

 Example Usage:
 --------------
 python export_inference_graph \
     --input_type image_tensor \
     --pipeline_config_path path/to/ssd_inception_v2.config \
-    --checkpoint_path path/to/model-ckpt \
-    --inference_graph_path path/to/inference_graph.pb
+    --trained_checkpoint_prefix path/to/model.ckpt \
+    --output_directory path/to/exported_model_directory
+
+The expected output would be in the directory
+path/to/exported_model_directory (which is created if it does not exist)
+with contents:
+ - graph.pbtxt
+ - model.ckpt.data-00000-of-00001
+ - model.ckpt.info
+ - model.ckpt.meta
+ - frozen_inference_graph.pb
+ + saved_model (a directory)
 """
 import tensorflow as tf
 from google.protobuf import text_format
...
@@ -70,31 +76,27 @@ flags = tf.app.flags
 flags.DEFINE_string('input_type', 'image_tensor', 'Type of input node. Can be '
                     'one of [`image_tensor`, `encoded_image_string_tensor`, '
                     '`tf_example`]')
-flags.DEFINE_string('pipeline_config_path', '',
-                    'Path to a pipeline_pb2.TrainEvalPipelineConfig config '
-                    'file.')
-flags.DEFINE_string('checkpoint_path', '', 'Optional path to checkpoint file. '
-                    'If provided, bakes the weights from the checkpoint into '
-                    'the graph.')
-flags.DEFINE_string('inference_graph_path', '', 'Path to write the output '
-                    'inference graph.')
-flags.DEFINE_bool('export_as_saved_model', False, 'Whether the exported graph '
-                  'should be saved as a SavedModel')
+flags.DEFINE_string('pipeline_config_path', None,
+                    'Path to a pipeline_pb2.TrainEvalPipelineConfig config '
+                    'file.')
+flags.DEFINE_string('trained_checkpoint_prefix', None,
+                    'Path to trained checkpoint, typically of the form '
+                    'path/to/model.ckpt')
+flags.DEFINE_string('output_directory', None, 'Path to write outputs.')
+tf.app.flags.MarkFlagAsRequired('pipeline_config_path')
+tf.app.flags.MarkFlagAsRequired('trained_checkpoint_prefix')
+tf.app.flags.MarkFlagAsRequired('output_directory')
 FLAGS = flags.FLAGS

 def main(_):
-  assert FLAGS.pipeline_config_path, 'TrainEvalPipelineConfig missing.'
-  assert FLAGS.inference_graph_path, 'Inference graph path missing.'
-  assert FLAGS.input_type, 'Input type missing.'
   pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
   with tf.gfile.GFile(FLAGS.pipeline_config_path, 'r') as f:
     text_format.Merge(f.read(), pipeline_config)
-  exporter.export_inference_graph(FLAGS.input_type, pipeline_config,
-                                  FLAGS.checkpoint_path,
-                                  FLAGS.inference_graph_path,
-                                  FLAGS.export_as_saved_model)
+  exporter.export_inference_graph(FLAGS.input_type, pipeline_config,
+                                  FLAGS.trained_checkpoint_prefix,
+                                  FLAGS.output_directory)

 if __name__ == '__main__':
...
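The new interface is driven entirely by the three required flags. For readers who want to invoke the exporter from Python rather than the command line, here is a minimal sketch that mirrors main() above; all paths are placeholders, and it assumes the object_detection package and its generated protos are importable:

    # Sketch: driving the new exporter interface programmatically,
    # mirroring main() above. All paths are placeholders.
    import tensorflow as tf
    from google.protobuf import text_format

    from object_detection import exporter
    from object_detection.protos import pipeline_pb2

    pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
    with tf.gfile.GFile('path/to/ssd_inception_v2.config', 'r') as f:
      text_format.Merge(f.read(), pipeline_config)

    exporter.export_inference_graph(
        input_type='image_tensor',
        pipeline_config=pipeline_config,
        trained_checkpoint_prefix='path/to/model.ckpt',
        output_directory='path/to/exported_model_directory')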
object_detection/exporter.py (view file @ 213125e3)
...
@@ -17,6 +17,7 @@
 import logging
 import os
 import tensorflow as tf
+from tensorflow.core.protobuf import rewriter_config_pb2
 from tensorflow.python import pywrap_tensorflow
 from tensorflow.python.client import session
 from tensorflow.python.framework import graph_util
...
@@ -42,6 +43,7 @@ def freeze_graph_with_def_protos(
                                  filename_tensor_name,
                                  clear_devices,
                                  initializer_nodes,
+                                 optimize_graph=True,
                                  variable_names_blacklist=''):
   """Converts all variables in a graph and checkpoint into constants."""
   del restore_op_name, filename_tensor_name  # Unused by updated loading code.
...
@@ -61,9 +63,23 @@ def freeze_graph_with_def_protos(
     for node in input_graph_def.node:
       node.device = ''
-  _ = importer.import_graph_def(input_graph_def, name='')
-  with session.Session() as sess:
+  with tf.Graph().as_default():
+    tf.import_graph_def(input_graph_def, name='')
+    if optimize_graph:
+      logging.info('Graph Rewriter optimizations enabled')
+      rewrite_options = rewriter_config_pb2.RewriterConfig(
+          optimize_tensor_layout=True)
+      rewrite_options.optimizers.append('pruning')
+      rewrite_options.optimizers.append('constfold')
+      rewrite_options.optimizers.append('layout')
+      graph_options = tf.GraphOptions(
+          rewrite_options=rewrite_options, infer_shapes=True)
+    else:
+      logging.info('Graph Rewriter optimizations disabled')
+      graph_options = tf.GraphOptions()
+    config = tf.ConfigProto(graph_options=graph_options)
+    with session.Session(config=config) as sess:
       if input_saver_def:
         saver = saver_lib.Saver(saver_def=input_saver_def)
         saver.restore(sess, input_checkpoint)
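The new optimize_graph path wires Grappler into the freezing session through GraphOptions. Below is a standalone sketch of the same session configuration, under the assumption of a 2017-era TF 1.x build where RewriterConfig still exposes the optimize_tensor_layout field used in the diff:

    # Sketch: the session config freeze_graph_with_def_protos builds
    # when optimize_graph=True. Assumes a 2017-era TF 1.x
    # RewriterConfig that still has the optimize_tensor_layout field.
    import tensorflow as tf
    from tensorflow.core.protobuf import rewriter_config_pb2

    rewrite_options = rewriter_config_pb2.RewriterConfig(
        optimize_tensor_layout=True)
    # Explicitly listed Grappler passes run in the order given.
    for optimizer in ('pruning', 'constfold', 'layout'):
      rewrite_options.optimizers.append(optimizer)
    graph_options = tf.GraphOptions(rewrite_options=rewrite_options,
                                    infer_shapes=True)
    config = tf.ConfigProto(graph_options=graph_options)

    with tf.Session(config=config) as sess:
      pass  # Graphs run here are rewritten by Grappler before execution.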
...
@@ -95,29 +111,6 @@ def freeze_graph_with_def_protos(
   return output_graph_def

-def get_frozen_graph_def(inference_graph_def, use_moving_averages,
-                         input_checkpoint, output_node_names):
-  """Freezes all variables in a graph definition."""
-  saver = None
-  if use_moving_averages:
-    variable_averages = tf.train.ExponentialMovingAverage(0.0)
-    variables_to_restore = variable_averages.variables_to_restore()
-    saver = tf.train.Saver(variables_to_restore)
-  else:
-    saver = tf.train.Saver()
-  frozen_graph_def = freeze_graph_with_def_protos(
-      input_graph_def=inference_graph_def,
-      input_saver_def=saver.as_saver_def(),
-      input_checkpoint=input_checkpoint,
-      output_node_names=output_node_names,
-      restore_op_name='save/restore_all',
-      filename_tensor_name='save/Const:0',
-      clear_devices=True,
-      initializer_nodes='')
-  return frozen_graph_def

 # TODO: Support batch tf example inputs.
 def _tf_example_input_placeholder():
   tf_example_placeholder = tf.placeholder(
...
@@ -151,7 +144,8 @@ input_placeholder_fn_map = {
 }

-def _add_output_tensor_nodes(postprocessed_tensors):
+def _add_output_tensor_nodes(postprocessed_tensors,
+                             output_collection_name='inference_op'):
   """Adds output nodes for detection boxes and scores.
...
@@ -174,6 +168,7 @@ def _add_output_tensor_nodes(postprocessed_tensors):
       'detection_masks': [batch, max_detections, mask_height, mask_width]
         (optional).
       'num_detections': [batch]
+    output_collection_name: Name of collection to add output tensors to.

   Returns:
     A tensor dict containing the added output tensor nodes.
...
@@ -191,53 +186,29 @@ def _add_output_tensor_nodes(postprocessed_tensors):
   outputs['num_detections'] = tf.identity(num_detections,
                                           name='num_detections')
   if masks is not None:
     outputs['detection_masks'] = tf.identity(masks, name='detection_masks')
+  for output_key in outputs:
+    tf.add_to_collection(output_collection_name, outputs[output_key])
+  if masks is not None:
+    tf.add_to_collection(output_collection_name, outputs['detection_masks'])
   return outputs
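Because _add_output_tensor_nodes now also registers every output tensor in a named graph collection ('inference_op' by default), a consumer that has restored the exported meta graph can enumerate the outputs without hard-coding tensor names. A minimal sketch, assuming the exported graph is already loaded into the default graph:

    # Sketch: enumerating exported outputs via the collection that
    # _add_output_tensor_nodes now populates ('inference_op' default).
    import tensorflow as tf

    for output_tensor in tf.get_collection('inference_op'):
      print(output_tensor.op.name, output_tensor.get_shape())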
-def _write_inference_graph(inference_graph_path,
-                           checkpoint_path=None,
-                           use_moving_averages=False,
-                           output_node_names=(
-                               'num_detections,detection_scores,'
-                               'detection_boxes,detection_classes')):
-  """Writes inference graph to disk with the option to bake in weights.
-
-  If checkpoint_path is not None bakes the weights into the graph thereby
-  eliminating the need of checkpoint files during inference. If the model
-  was trained with moving averages, setting use_moving_averages to true
-  restores the moving averages, otherwise the original set of variables
-  is restored.
-
-  Args:
-    inference_graph_path: Path to write inference graph.
-    checkpoint_path: Optional path to the checkpoint file.
-    use_moving_averages: Whether to export the original or the moving averages
-      of the trainable variables from the checkpoint.
-    output_node_names: Output tensor names, defaults are: num_detections,
-      detection_scores, detection_boxes, detection_classes.
-  """
-  inference_graph_def = tf.get_default_graph().as_graph_def()
-  if checkpoint_path:
-    output_graph_def = get_frozen_graph_def(
-        inference_graph_def=inference_graph_def,
-        use_moving_averages=use_moving_averages,
-        input_checkpoint=checkpoint_path,
-        output_node_names=output_node_names,
-    )
-    with gfile.GFile(inference_graph_path, 'wb') as f:
-      f.write(output_graph_def.SerializeToString())
-    logging.info('%d ops in the final graph.', len(output_graph_def.node))
-    return
-  tf.train.write_graph(inference_graph_def,
-                       os.path.dirname(inference_graph_path),
-                       os.path.basename(inference_graph_path),
-                       as_text=False)
+def _write_frozen_graph(frozen_graph_path, frozen_graph_def):
+  """Writes frozen graph to disk.
+
+  Args:
+    frozen_graph_path: Path to write inference graph.
+    frozen_graph_def: tf.GraphDef holding frozen graph.
+  """
+  with gfile.GFile(frozen_graph_path, 'wb') as f:
+    f.write(frozen_graph_def.SerializeToString())
+  logging.info('%d ops in the final graph.', len(frozen_graph_def.node))

-def _write_saved_model(inference_graph_path, inputs, outputs,
-                       checkpoint_path=None, use_moving_averages=False):
+def _write_saved_model(saved_model_path, frozen_graph_def, inputs, outputs):
   """Writes SavedModel to disk.

   If checkpoint_path is not None bakes the weights into the graph thereby
...
@@ -247,30 +218,17 @@ def _write_saved_model(inference_graph_path, inputs, outputs,
   is restored.

   Args:
-    inference_graph_path: Path to write inference graph.
+    saved_model_path: Path to write SavedModel.
+    frozen_graph_def: tf.GraphDef holding frozen graph.
     inputs: The input image tensor to use for detection.
     outputs: A tensor dictionary containing the outputs of a DetectionModel.
-    checkpoint_path: Optional path to the checkpoint file.
-    use_moving_averages: Whether to export the original or the moving averages
-      of the trainable variables from the checkpoint.
   """
-  inference_graph_def = tf.get_default_graph().as_graph_def()
-  checkpoint_graph_def = None
-  if checkpoint_path:
-    output_node_names = ','.join(outputs.keys())
-    checkpoint_graph_def = get_frozen_graph_def(
-        inference_graph_def=inference_graph_def,
-        use_moving_averages=use_moving_averages,
-        input_checkpoint=checkpoint_path,
-        output_node_names=output_node_names)
   with tf.Graph().as_default():
     with session.Session() as sess:
-      tf.import_graph_def(checkpoint_graph_def)
-      builder = tf.saved_model.builder.SavedModelBuilder(inference_graph_path)
+      tf.import_graph_def(frozen_graph_def, name='')
+      builder = tf.saved_model.builder.SavedModelBuilder(saved_model_path)

       tensor_info_inputs = {
           'inputs': tf.saved_model.utils.build_tensor_info(inputs)}
...
@@ -287,53 +245,101 @@ def _write_saved_model(inference_graph_path, inputs, outputs,
       builder.add_meta_graph_and_variables(
           sess, [tf.saved_model.tag_constants.SERVING],
           signature_def_map={
-              signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
+              'signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY':
                   detection_signature,
           },
       )
       builder.save()
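One detail worth flagging: the rewritten signature_def_map keys the detection signature with the literal string 'signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY' (note the quotes) rather than the constant's value, so as of this commit a lookup by the standard default serving key will not find it. A minimal loading sketch against the directory layout this commit writes (the path is a placeholder):

    # Sketch: loading the SavedModel written by _write_saved_model.
    # The path is a placeholder; note the literal signature key used
    # as of this commit.
    import tensorflow as tf

    with tf.Session(graph=tf.Graph()) as sess:
      meta_graph_def = tf.saved_model.loader.load(
          sess, [tf.saved_model.tag_constants.SERVING],
          'path/to/exported_model_directory/saved_model')
      signature = meta_graph_def.signature_def[
          'signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY']
      print(signature.inputs['inputs'].name)
      print(sorted(signature.outputs.keys()))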
+def _write_graph_and_checkpoint(inference_graph_def,
+                                model_path,
+                                input_saver_def,
+                                trained_checkpoint_prefix):
+  for node in inference_graph_def.node:
+    node.device = ''
+  with tf.Graph().as_default():
+    tf.import_graph_def(inference_graph_def, name='')
+    with session.Session() as sess:
+      saver = saver_lib.Saver(saver_def=input_saver_def,
+                              save_relative_paths=True)
+      saver.restore(sess, trained_checkpoint_prefix)
+      saver.save(sess, model_path)

 def _export_inference_graph(input_type,
                             detection_model,
                             use_moving_averages,
-                            checkpoint_path,
-                            inference_graph_path,
-                            export_as_saved_model=False):
+                            trained_checkpoint_prefix,
+                            output_directory,
+                            optimize_graph=True,
+                            output_collection_name='inference_op'):
   """Export helper."""
+  tf.gfile.MakeDirs(output_directory)
+  frozen_graph_path = os.path.join(output_directory,
+                                   'frozen_inference_graph.pb')
+  saved_model_path = os.path.join(output_directory, 'saved_model')
+  model_path = os.path.join(output_directory, 'model.ckpt')
+
   if input_type not in input_placeholder_fn_map:
     raise ValueError('Unknown input type: {}'.format(input_type))
   inputs = tf.to_float(input_placeholder_fn_map[input_type]())
   preprocessed_inputs = detection_model.preprocess(inputs)
   output_tensors = detection_model.predict(preprocessed_inputs)
   postprocessed_tensors = detection_model.postprocess(output_tensors)
-  outputs = _add_output_tensor_nodes(postprocessed_tensors)
-  out_node_names = list(outputs.keys())
-  if export_as_saved_model:
-    _write_saved_model(inference_graph_path, inputs, outputs, checkpoint_path,
-                       use_moving_averages)
-  else:
-    _write_inference_graph(inference_graph_path, checkpoint_path,
-                           use_moving_averages,
-                           output_node_names=','.join(out_node_names))
+  outputs = _add_output_tensor_nodes(postprocessed_tensors,
+                                     output_collection_name)
+
+  saver = None
+  if use_moving_averages:
+    variable_averages = tf.train.ExponentialMovingAverage(0.0)
+    variables_to_restore = variable_averages.variables_to_restore()
+    saver = tf.train.Saver(variables_to_restore)
+  else:
+    saver = tf.train.Saver()
+  input_saver_def = saver.as_saver_def()
+
+  _write_graph_and_checkpoint(
+      inference_graph_def=tf.get_default_graph().as_graph_def(),
+      model_path=model_path,
+      input_saver_def=input_saver_def,
+      trained_checkpoint_prefix=trained_checkpoint_prefix)
+
+  frozen_graph_def = freeze_graph_with_def_protos(
+      input_graph_def=tf.get_default_graph().as_graph_def(),
+      input_saver_def=input_saver_def,
+      input_checkpoint=trained_checkpoint_prefix,
+      output_node_names=','.join(outputs.keys()),
+      restore_op_name='save/restore_all',
+      filename_tensor_name='save/Const:0',
+      clear_devices=True,
+      optimize_graph=optimize_graph,
+      initializer_nodes='')
+  _write_frozen_graph(frozen_graph_path, frozen_graph_def)
+  _write_saved_model(saved_model_path, frozen_graph_def, inputs, outputs)

-def export_inference_graph(input_type, pipeline_config, checkpoint_path,
-                           inference_graph_path, export_as_saved_model=False):
+def export_inference_graph(input_type, pipeline_config,
+                           trained_checkpoint_prefix, output_directory,
+                           optimize_graph=True,
+                           output_collection_name='inference_op'):
   """Exports inference graph for the model specified in the pipeline config.

   Args:
     input_type: Type of input for the graph. Can be one of [`image_tensor`,
       `tf_example`].
     pipeline_config: pipeline_pb2.TrainAndEvalPipelineConfig proto.
-    checkpoint_path: Path to the checkpoint file to freeze.
-    inference_graph_path: Path to write inference graph to.
-    export_as_saved_model: If the model should be exported as a SavedModel. If
-      false, it is saved as an inference graph.
+    trained_checkpoint_prefix: Path to the trained checkpoint file.
+    output_directory: Path to write outputs.
+    optimize_graph: Whether to optimize graph using Grappler.
+    output_collection_name: Name of collection to add output tensors to.
+      If None, does not add output tensors to a collection.
   """
   detection_model = model_builder.build(pipeline_config.model,
                                         is_training=False)
   _export_inference_graph(input_type, detection_model,
                           pipeline_config.eval_config.use_moving_averages,
-                          checkpoint_path, inference_graph_path,
-                          export_as_saved_model)
+                          trained_checkpoint_prefix, output_directory,
+                          optimize_graph, output_collection_name)
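With these changes a single export_inference_graph call writes all of the artifacts into output_directory: the unfrozen graph plus re-saved checkpoint (graph.pbtxt, model.ckpt.*), the frozen graph, and the SavedModel. A minimal sketch of consuming the frozen graph, the most common deployment artifact (the path and input shape are placeholders):

    # Sketch: running detection from the frozen_inference_graph.pb
    # that the new exporter writes. Path and input size are placeholders.
    import numpy as np
    import tensorflow as tf

    graph_def = tf.GraphDef()
    with tf.gfile.GFile(
        'path/to/exported_model_directory/frozen_inference_graph.pb',
        'rb') as f:
      graph_def.ParseFromString(f.read())

    with tf.Graph().as_default() as graph:
      tf.import_graph_def(graph_def, name='')
      with tf.Session(graph=graph) as sess:
        image = np.zeros((1, 300, 300, 3), dtype=np.uint8)  # dummy input
        boxes, scores, num = sess.run(
            ['detection_boxes:0', 'detection_scores:0', 'num_detections:0'],
            feed_dict={'image_tensor:0': image})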
object_detection/exporter_test.py (view file @ 213125e3)
...
@@ -103,71 +103,62 @@ class ExportInferenceGraphTest(tf.test.TestCase):
     return example

   def test_export_graph_with_image_tensor_input(self):
+    tmp_dir = self.get_temp_dir()
+    trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
+    self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
+                                          use_moving_averages=False)
     with mock.patch.object(
         model_builder, 'build', autospec=True) as mock_builder:
       mock_builder.return_value = FakeModel()
-      inference_graph_path = os.path.join(self.get_temp_dir(),
-                                          'exported_graph.pbtxt')
+      output_directory = os.path.join(tmp_dir, 'output')
       pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
       pipeline_config.eval_config.use_moving_averages = False
       exporter.export_inference_graph(
           input_type='image_tensor',
           pipeline_config=pipeline_config,
-          checkpoint_path=None,
-          inference_graph_path=inference_graph_path)
+          trained_checkpoint_prefix=trained_checkpoint_prefix,
+          output_directory=output_directory)

   def test_export_graph_with_tf_example_input(self):
+    tmp_dir = self.get_temp_dir()
+    trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
+    self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
+                                          use_moving_averages=False)
     with mock.patch.object(
         model_builder, 'build', autospec=True) as mock_builder:
       mock_builder.return_value = FakeModel()
-      inference_graph_path = os.path.join(self.get_temp_dir(),
-                                          'exported_graph.pbtxt')
+      output_directory = os.path.join(tmp_dir, 'output')
       pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
       pipeline_config.eval_config.use_moving_averages = False
       exporter.export_inference_graph(
           input_type='tf_example',
           pipeline_config=pipeline_config,
-          checkpoint_path=None,
-          inference_graph_path=inference_graph_path)
+          trained_checkpoint_prefix=trained_checkpoint_prefix,
+          output_directory=output_directory)

   def test_export_graph_with_encoded_image_string_input(self):
-    with mock.patch.object(
-        model_builder, 'build', autospec=True) as mock_builder:
-      mock_builder.return_value = FakeModel()
-      inference_graph_path = os.path.join(self.get_temp_dir(),
-                                          'exported_graph.pbtxt')
-      pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
-      pipeline_config.eval_config.use_moving_averages = False
-      exporter.export_inference_graph(
-          input_type='encoded_image_string_tensor',
-          pipeline_config=pipeline_config,
-          checkpoint_path=None,
-          inference_graph_path=inference_graph_path)
-
-  def test_export_frozen_graph(self):
-    checkpoint_path = os.path.join(self.get_temp_dir(), 'model-ckpt')
-    self._save_checkpoint_from_mock_model(checkpoint_path,
-                                          use_moving_averages=False)
-    inference_graph_path = os.path.join(self.get_temp_dir(),
-                                        'exported_graph.pb')
+    tmp_dir = self.get_temp_dir()
+    trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
+    self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
+                                          use_moving_averages=False)
     with mock.patch.object(
         model_builder, 'build', autospec=True) as mock_builder:
       mock_builder.return_value = FakeModel()
+      output_directory = os.path.join(tmp_dir, 'output')
       pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
       pipeline_config.eval_config.use_moving_averages = False
       exporter.export_inference_graph(
-          input_type='image_tensor',
+          input_type='encoded_image_string_tensor',
           pipeline_config=pipeline_config,
-          checkpoint_path=checkpoint_path,
-          inference_graph_path=inference_graph_path)
+          trained_checkpoint_prefix=trained_checkpoint_prefix,
+          output_directory=output_directory)

-  def test_export_frozen_graph_with_moving_averages(self):
-    checkpoint_path = os.path.join(self.get_temp_dir(), 'model-ckpt')
-    self._save_checkpoint_from_mock_model(checkpoint_path,
+  def test_export_graph_with_moving_averages(self):
+    tmp_dir = self.get_temp_dir()
+    trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
+    self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
                                           use_moving_averages=True)
-    inference_graph_path = os.path.join(self.get_temp_dir(),
-                                        'exported_graph.pb')
+    output_directory = os.path.join(tmp_dir, 'output')
     with mock.patch.object(
         model_builder, 'build', autospec=True) as mock_builder:
       mock_builder.return_value = FakeModel()
...
@@ -176,15 +167,17 @@ class ExportInferenceGraphTest(tf.test.TestCase):
       exporter.export_inference_graph(
           input_type='image_tensor',
           pipeline_config=pipeline_config,
-          checkpoint_path=checkpoint_path,
-          inference_graph_path=inference_graph_path)
+          trained_checkpoint_prefix=trained_checkpoint_prefix,
+          output_directory=output_directory)

   def test_export_model_with_all_output_nodes(self):
-    checkpoint_path = os.path.join(self.get_temp_dir(), 'model-ckpt')
-    self._save_checkpoint_from_mock_model(checkpoint_path,
-                                          use_moving_averages=False)
-    inference_graph_path = os.path.join(self.get_temp_dir(),
-                                        'exported_graph.pb')
+    tmp_dir = self.get_temp_dir()
+    trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
+    self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
+                                          use_moving_averages=True)
+    output_directory = os.path.join(tmp_dir, 'output')
+    inference_graph_path = os.path.join(output_directory,
+                                        'frozen_inference_graph.pb')
     with mock.patch.object(
         model_builder, 'build', autospec=True) as mock_builder:
       mock_builder.return_value = FakeModel(add_detection_masks=True)
...
@@ -192,8 +185,8 @@ class ExportInferenceGraphTest(tf.test.TestCase):
       exporter.export_inference_graph(
           input_type='image_tensor',
           pipeline_config=pipeline_config,
-          checkpoint_path=checkpoint_path,
-          inference_graph_path=inference_graph_path)
+          trained_checkpoint_prefix=trained_checkpoint_prefix,
+          output_directory=output_directory)
     inference_graph = self._load_inference_graph(inference_graph_path)
     with self.test_session(graph=inference_graph):
       inference_graph.get_tensor_by_name('image_tensor:0')
...
@@ -204,11 +197,13 @@ class ExportInferenceGraphTest(tf.test.TestCase):
       inference_graph.get_tensor_by_name('num_detections:0')

   def test_export_model_with_detection_only_nodes(self):
-    checkpoint_path = os.path.join(self.get_temp_dir(), 'model-ckpt')
-    self._save_checkpoint_from_mock_model(checkpoint_path,
-                                          use_moving_averages=False)
-    inference_graph_path = os.path.join(self.get_temp_dir(),
-                                        'exported_graph.pb')
+    tmp_dir = self.get_temp_dir()
+    trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
+    self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
+                                          use_moving_averages=True)
+    output_directory = os.path.join(tmp_dir, 'output')
+    inference_graph_path = os.path.join(output_directory,
+                                        'frozen_inference_graph.pb')
     with mock.patch.object(
         model_builder, 'build', autospec=True) as mock_builder:
       mock_builder.return_value = FakeModel(add_detection_masks=False)
...
@@ -216,8 +211,8 @@ class ExportInferenceGraphTest(tf.test.TestCase):
       exporter.export_inference_graph(
           input_type='image_tensor',
           pipeline_config=pipeline_config,
-          checkpoint_path=checkpoint_path,
-          inference_graph_path=inference_graph_path)
+          trained_checkpoint_prefix=trained_checkpoint_prefix,
+          output_directory=output_directory)
     inference_graph = self._load_inference_graph(inference_graph_path)
     with self.test_session(graph=inference_graph):
       inference_graph.get_tensor_by_name('image_tensor:0')
...
@@ -229,11 +224,13 @@ class ExportInferenceGraphTest(tf.test.TestCase):
       inference_graph.get_tensor_by_name('detection_masks:0')

   def test_export_and_run_inference_with_image_tensor(self):
-    checkpoint_path = os.path.join(self.get_temp_dir(), 'model-ckpt')
-    self._save_checkpoint_from_mock_model(checkpoint_path,
-                                          use_moving_averages=False)
-    inference_graph_path = os.path.join(self.get_temp_dir(),
-                                        'exported_graph.pb')
+    tmp_dir = self.get_temp_dir()
+    trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
+    self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
+                                          use_moving_averages=True)
+    output_directory = os.path.join(tmp_dir, 'output')
+    inference_graph_path = os.path.join(output_directory,
+                                        'frozen_inference_graph.pb')
     with mock.patch.object(
         model_builder, 'build', autospec=True) as mock_builder:
       mock_builder.return_value = FakeModel(add_detection_masks=True)
...
@@ -242,8 +239,8 @@ class ExportInferenceGraphTest(tf.test.TestCase):
       exporter.export_inference_graph(
           input_type='image_tensor',
           pipeline_config=pipeline_config,
-          checkpoint_path=checkpoint_path,
-          inference_graph_path=inference_graph_path)
+          trained_checkpoint_prefix=trained_checkpoint_prefix,
+          output_directory=output_directory)
     inference_graph = self._load_inference_graph(inference_graph_path)
     with self.test_session(graph=inference_graph) as sess:
...
@@ -276,11 +273,13 @@ class ExportInferenceGraphTest(tf.test.TestCase):
       return encoded_string.eval()

   def test_export_and_run_inference_with_encoded_image_string_tensor(self):
-    checkpoint_path = os.path.join(self.get_temp_dir(), 'model-ckpt')
-    self._save_checkpoint_from_mock_model(checkpoint_path,
-                                          use_moving_averages=False)
-    inference_graph_path = os.path.join(self.get_temp_dir(),
-                                        'exported_graph.pb')
+    tmp_dir = self.get_temp_dir()
+    trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
+    self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
+                                          use_moving_averages=True)
+    output_directory = os.path.join(tmp_dir, 'output')
+    inference_graph_path = os.path.join(output_directory,
+                                        'frozen_inference_graph.pb')
     with mock.patch.object(
         model_builder, 'build', autospec=True) as mock_builder:
       mock_builder.return_value = FakeModel(add_detection_masks=True)
...
@@ -289,8 +288,8 @@ class ExportInferenceGraphTest(tf.test.TestCase):
       exporter.export_inference_graph(
           input_type='encoded_image_string_tensor',
           pipeline_config=pipeline_config,
-          checkpoint_path=checkpoint_path,
-          inference_graph_path=inference_graph_path)
+          trained_checkpoint_prefix=trained_checkpoint_prefix,
+          output_directory=output_directory)
     inference_graph = self._load_inference_graph(inference_graph_path)
     jpg_image_str = self._create_encoded_image_string(
...
@@ -318,11 +317,13 @@ class ExportInferenceGraphTest(tf.test.TestCase):
       self.assertAllClose(num_detections_np, [2])

   def test_export_and_run_inference_with_tf_example(self):
-    checkpoint_path = os.path.join(self.get_temp_dir(), 'model-ckpt')
-    self._save_checkpoint_from_mock_model(checkpoint_path,
-                                          use_moving_averages=False)
-    inference_graph_path = os.path.join(self.get_temp_dir(),
-                                        'exported_graph.pb')
+    tmp_dir = self.get_temp_dir()
+    trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
+    self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
+                                          use_moving_averages=True)
+    output_directory = os.path.join(tmp_dir, 'output')
+    inference_graph_path = os.path.join(output_directory,
+                                        'frozen_inference_graph.pb')
     with mock.patch.object(
         model_builder, 'build', autospec=True) as mock_builder:
       mock_builder.return_value = FakeModel(add_detection_masks=True)
...
@@ -331,8 +332,8 @@ class ExportInferenceGraphTest(tf.test.TestCase):
       exporter.export_inference_graph(
           input_type='tf_example',
           pipeline_config=pipeline_config,
-          checkpoint_path=checkpoint_path,
-          inference_graph_path=inference_graph_path)
+          trained_checkpoint_prefix=trained_checkpoint_prefix,
+          output_directory=output_directory)
     inference_graph = self._load_inference_graph(inference_graph_path)
     with self.test_session(graph=inference_graph) as sess:
...
@@ -354,11 +355,12 @@ class ExportInferenceGraphTest(tf.test.TestCase):
     self.assertAllClose(num_detections, [2])

   def test_export_saved_model_and_run_inference(self):
-    checkpoint_path = os.path.join(self.get_temp_dir(), 'model-ckpt')
-    self._save_checkpoint_from_mock_model(checkpoint_path,
+    tmp_dir = self.get_temp_dir()
+    trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
+    self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
                                           use_moving_averages=False)
-    inference_graph_path = os.path.join(self.get_temp_dir(),
-                                        'saved_model')
+    output_directory = os.path.join(tmp_dir, 'output')
+    saved_model_path = os.path.join(output_directory, 'saved_model')
     with mock.patch.object(
         model_builder, 'build', autospec=True) as mock_builder:
...
@@ -368,20 +370,19 @@ class ExportInferenceGraphTest(tf.test.TestCase):
       exporter.export_inference_graph(
           input_type='tf_example',
           pipeline_config=pipeline_config,
-          checkpoint_path=checkpoint_path,
-          inference_graph_path=inference_graph_path,
-          export_as_saved_model=True)
+          trained_checkpoint_prefix=trained_checkpoint_prefix,
+          output_directory=output_directory)

     with tf.Graph().as_default() as od_graph:
       with self.test_session(graph=od_graph) as sess:
         tf.saved_model.loader.load(
-            sess, [tf.saved_model.tag_constants.SERVING],
-            inference_graph_path)
-        tf_example = od_graph.get_tensor_by_name('import/tf_example:0')
-        boxes = od_graph.get_tensor_by_name('import/detection_boxes:0')
-        scores = od_graph.get_tensor_by_name('import/detection_scores:0')
-        classes = od_graph.get_tensor_by_name('import/detection_classes:0')
-        masks = od_graph.get_tensor_by_name('import/detection_masks:0')
-        num_detections = od_graph.get_tensor_by_name('import/num_detections:0')
+            sess, [tf.saved_model.tag_constants.SERVING],
+            saved_model_path)
+        tf_example = od_graph.get_tensor_by_name('tf_example:0')
+        boxes = od_graph.get_tensor_by_name('detection_boxes:0')
+        scores = od_graph.get_tensor_by_name('detection_scores:0')
+        classes = od_graph.get_tensor_by_name('detection_classes:0')
+        masks = od_graph.get_tensor_by_name('detection_masks:0')
+        num_detections = od_graph.get_tensor_by_name('num_detections:0')
         (boxes, scores, classes, masks, num_detections) = sess.run(
             [boxes, scores, classes, masks, num_detections],
             feed_dict={tf_example: self._create_tf_example(
...
@@ -393,5 +394,48 @@ class ExportInferenceGraphTest(tf.test.TestCase):
     self.assertAllClose(masks, np.arange(32).reshape([2, 4, 4]))
     self.assertAllClose(num_detections, [2])

+  def test_export_checkpoint_and_run_inference(self):
+    tmp_dir = self.get_temp_dir()
+    trained_checkpoint_prefix = os.path.join(tmp_dir, 'model.ckpt')
+    self._save_checkpoint_from_mock_model(trained_checkpoint_prefix,
+                                          use_moving_averages=False)
+    output_directory = os.path.join(tmp_dir, 'output')
+    model_path = os.path.join(output_directory, 'model.ckpt')
+    meta_graph_path = model_path + '.meta'
+    with mock.patch.object(
+        model_builder, 'build', autospec=True) as mock_builder:
+      mock_builder.return_value = FakeModel(add_detection_masks=True)
+      pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
+      pipeline_config.eval_config.use_moving_averages = False
+      exporter.export_inference_graph(
+          input_type='tf_example',
+          pipeline_config=pipeline_config,
+          trained_checkpoint_prefix=trained_checkpoint_prefix,
+          output_directory=output_directory)
+
+    with tf.Graph().as_default() as od_graph:
+      with self.test_session(graph=od_graph) as sess:
+        new_saver = tf.train.import_meta_graph(meta_graph_path)
+        new_saver.restore(sess, model_path)
+
+        tf_example = od_graph.get_tensor_by_name('tf_example:0')
+        boxes = od_graph.get_tensor_by_name('detection_boxes:0')
+        scores = od_graph.get_tensor_by_name('detection_scores:0')
+        classes = od_graph.get_tensor_by_name('detection_classes:0')
+        masks = od_graph.get_tensor_by_name('detection_masks:0')
+        num_detections = od_graph.get_tensor_by_name('num_detections:0')
+        (boxes, scores, classes, masks, num_detections) = sess.run(
+            [boxes, scores, classes, masks, num_detections],
+            feed_dict={tf_example: self._create_tf_example(
+                np.ones((4, 4, 3)).astype(np.uint8))})
+        self.assertAllClose(boxes, [[0.0, 0.0, 0.5, 0.5],
+                                    [0.5, 0.5, 0.8, 0.8]])
+        self.assertAllClose(scores, [[0.7, 0.6]])
+        self.assertAllClose(classes, [[1, 2]])
+        self.assertAllClose(masks, np.arange(32).reshape([2, 4, 4]))
+        self.assertAllClose(num_detections, [2])

 if __name__ == '__main__':
   tf.test.main()
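The new test_export_checkpoint_and_run_inference covers the third export artifact, the re-saved checkpoint pair. Outside the test harness the same round trip looks roughly like the following sketch (paths are placeholders for an actual export directory):

    # Sketch: restoring the graph.pbtxt/model.ckpt pair the exporter
    # now writes, mirroring test_export_checkpoint_and_run_inference.
    # Paths are placeholders.
    import tensorflow as tf

    model_path = 'path/to/exported_model_directory/model.ckpt'
    with tf.Graph().as_default() as graph:
      with tf.Session(graph=graph) as sess:
        saver = tf.train.import_meta_graph(model_path + '.meta')
        saver.restore(sess, model_path)
        num_detections = graph.get_tensor_by_name('num_detections:0')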