Commit ec91f6b8 authored by Derek Chow's avatar Derek Chow

Rename Saver used in init_fn

Bug:
We use two different Saver objects during training: one to initialize from a
checkpoint and another to save and restore checkpoints during training.
Both Savers were bound to the same variable name, causing the init_fn function
to use the latter instead of the former when it was called.
parent fa45b626
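The shadowing can be reproduced outside of TensorFlow. Below is a minimal, self-contained Python sketch (the names make_init_fn and the string stand-ins are hypothetical, not code from this repository) of why binding both Saver objects to the same name makes initializer_fn pick up the later one: a closure resolves the name when it is called, not when it is defined.

# Hypothetical stand-alone illustration of the bug fixed by this commit.
# The closure captures the *name* `saver`, so rebinding that name later
# changes which object initializer_fn ends up using.

def make_init_fn():
    saver = "fine-tune Saver"        # stand-in for tf.train.Saver(var_map)

    def initializer_fn():
        # `saver` is looked up when initializer_fn runs, not when it is defined.
        return saver

    saver = "training Saver"         # stand-in for the Saver created later for checkpointing
    return initializer_fn

init_fn = make_init_fn()
print(init_fn())                     # prints "training Saver", not "fine-tune Saver"

Renaming the checkpoint-initialization Saver to init_saver, as the diff below does, removes the rebinding, so the closure keeps referring to the Saver built from the fine-tune checkpoint variables.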
@@ -213,11 +213,12 @@ def train(create_tensor_dict_fn, create_model_fn, train_config, master, task,
     if train_config.fine_tune_checkpoint:
       var_map = detection_model.restore_map(
           from_detection_checkpoint=train_config.from_detection_checkpoint)
-      var_map = variables_helper.get_variables_available_in_checkpoint(
-          var_map, train_config.fine_tune_checkpoint)
-      saver = tf.train.Saver(var_map)
+      available_var_map = (variables_helper.
+                           get_variables_available_in_checkpoint(
+                               var_map, train_config.fine_tune_checkpoint))
+      init_saver = tf.train.Saver(available_var_map)
       def initializer_fn(sess):
-        saver.restore(sess, train_config.fine_tune_checkpoint)
+        init_saver.restore(sess, train_config.fine_tune_checkpoint)
       init_fn = initializer_fn
 
     with tf.device(deploy_config.optimizer_device()):