"...csrc/cpu/git@developer.sourcefind.cn:OpenDAS/vision.git" did not exist on "0ebbb0abd0610c8ffe978902c06751f94a2e3197"
Commit ddf382bb authored by Asim Shankar

Respond to PR comments

parent 06dd5c7e
@@ -61,6 +61,9 @@ def train(model, optimizer, dataset, log_interval=None):
   start = time.time()
   for (batch, (images, labels)) in enumerate(tfe.Iterator(dataset)):
     with tf.contrib.summary.record_summaries_every_n_global_steps(10):
+      # Record the operations used to compute the loss given the input,
+      # so that the gradient of the loss with respect to the variables
+      # can be computed.
       with tfe.GradientTape() as tape:
         logits = model(images, training=True)
         loss_value = loss(logits, labels)
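
The comment added in this hunk explains why the forward pass runs inside tfe.GradientTape(): only operations recorded on the tape can be differentiated afterwards. A minimal, self-contained sketch of that pattern; the toy variable, input, and loss below are illustrative and not the example's MNIST model, and depending on the TensorFlow 1.x release eager mode is enabled with tfe.enable_eager_execution() or tf.enable_eager_execution():

import tensorflow as tf
import tensorflow.contrib.eager as tfe

tfe.enable_eager_execution()

w = tfe.Variable(3.0)   # illustrative trainable variable
x = tf.constant(2.0)    # illustrative input

with tfe.GradientTape() as tape:
  # Operations executed here are recorded so they can be differentiated.
  loss_value = tf.square(w * x - 1.0)

# Gradient of the loss with respect to the recorded variable, which an
# optimizer (e.g. MomentumOptimizer, as in this example) could then apply.
grads = tape.gradient(loss_value, [w])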
@@ -111,6 +114,9 @@ def main(_):
   optimizer = tf.train.MomentumOptimizer(FLAGS.lr, FLAGS.momentum)
   if FLAGS.output_dir:
+    # Create directories to which summaries will be written
+    # tensorboard --logdir=<output_dir>
+    # can then be used to see the recorded summaries.
     train_dir = os.path.join(FLAGS.output_dir, 'train')
     test_dir = os.path.join(FLAGS.output_dir, 'eval')
     tf.gfile.MakeDirs(FLAGS.output_dir)
...
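
The comment added in the second hunk documents the summary layout: a 'train' and an 'eval' subdirectory under FLAGS.output_dir, which tensorboard --logdir=<output_dir> then displays as separate runs. A hedged sketch of how such directories are typically turned into summary writers with the contrib summary API; the path is illustrative (the example uses FLAGS.output_dir), and the writer-creation call is assumed to be tf.contrib.summary.create_file_writer, which some earlier 1.x releases spell create_summary_file_writer:

import os
import tensorflow as tf

output_dir = '/tmp/mnist_eager'   # illustrative; the example uses FLAGS.output_dir
train_dir = os.path.join(output_dir, 'train')
test_dir = os.path.join(output_dir, 'eval')
tf.gfile.MakeDirs(output_dir)

# One writer per subdirectory; `tensorboard --logdir=/tmp/mnist_eager`
# then shows 'train' and 'eval' side by side.
train_writer = tf.contrib.summary.create_file_writer(train_dir)
test_writer = tf.contrib.summary.create_file_writer(test_dir)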