Commit 75d3de64 authored by Mostafa Rahmani, committed by GitHub

Update cifar10_multi_gpu_train.py

Bug fix for the elimination of contrib.deprecated in TF version 12.
parent d1baa18c
cifar10_multi_gpu_train.py
@@ -93,7 +93,7 @@ def tower_loss(scope):
     # Remove 'tower_[0-9]/' from the name in case this is a multi-GPU training
     # session. This helps the clarity of presentation on tensorboard.
     loss_name = re.sub('%s_[0-9]*/' % cifar10.TOWER_NAME, '', l.op.name)
-    tf.contrib.deprecated.scalar_summary(loss_name, l)
+    tf.scalar_summary(loss_name, l)
 
   return total_loss
@@ -187,13 +187,13 @@ def train():
     grads = average_gradients(tower_grads)
 
     # Add a summary to track the learning rate.
-    summaries.append(tf.contrib.deprecated.scalar_summary('learning_rate', lr))
+    summaries.append(tf.scalar_summary('learning_rate', lr))
 
     # Add histograms for gradients.
     for grad, var in grads:
       if grad is not None:
         summaries.append(
-            tf.contrib.deprecated.histogram_summary(var.op.name + '/gradients',
-                                                    grad))
+            tf.histogram_summary(var.op.name + '/gradients',
+                                 grad))
 
     # Apply the gradients to adjust the shared variables.
@@ -202,7 +202,7 @@ def train():
     # Add histograms for trainable variables.
     for var in tf.trainable_variables():
       summaries.append(
-          tf.contrib.deprecated.histogram_summary(var.op.name, var))
+          tf.histogram_summary(var.op.name, var))
 
     # Track the moving averages of all trainable variables.
     variable_averages = tf.train.ExponentialMovingAverage(
@@ -216,7 +216,7 @@ def train():
     saver = tf.train.Saver(tf.global_variables())
 
     # Build the summary operation from the last tower summaries.
-    summary_op = tf.contrib.deprecated.merge_summary(summaries)
+    summary_op = tf.merge_summary(summaries)
 
     # Build an initialization operation to run below.
     init = tf.global_variables_initializer()
...
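For context, a minimal version-compatibility sketch, not part of this commit: it binds module-level aliases to whichever summary API the installed TensorFlow exposes, which is the incompatibility this change works around. The alias names (scalar_summary, histogram_summary, merge_summary) are placeholders chosen here for illustration; tf.summary.scalar, tf.summary.histogram, and tf.summary.merge are the locations these ops moved to in TF 0.12 and later.

```python
# Sketch (assumption, not part of the commit): select the summary ops that
# exist in the installed TensorFlow, so the script runs whether or not the
# old top-level names or tf.contrib.deprecated are present.
import tensorflow as tf

if hasattr(tf, 'summary') and hasattr(tf.summary, 'scalar'):
  # TF 0.12 and later expose the summary ops under tf.summary.*
  scalar_summary = tf.summary.scalar
  histogram_summary = tf.summary.histogram
  merge_summary = tf.summary.merge
else:
  # Older releases only have the original top-level ops.
  scalar_summary = tf.scalar_summary
  histogram_summary = tf.histogram_summary
  merge_summary = tf.merge_summary

# Usage would mirror the calls touched by this diff, e.g.:
#   scalar_summary(loss_name, l)
#   summary_op = merge_summary(summaries)
```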