".github/git@developer.sourcefind.cn:change/sglang.git" did not exist on "35ca04d2fa2ff78a74ec526bb916688a76b04120"
Commit 62cea658 authored by Mostafa Rahmani's avatar Mostafa Rahmani Committed by GitHub
Browse files

Update cifar10.py

Bug fix for the contrib.deprecated elimination in TF version 0.12.
parent bb5798c7
...@@ -91,8 +91,8 @@ def _activation_summary(x): ...@@ -91,8 +91,8 @@ def _activation_summary(x):
# Remove 'tower_[0-9]/' from the name in case this is a multi-GPU training # Remove 'tower_[0-9]/' from the name in case this is a multi-GPU training
# session. This helps the clarity of presentation on tensorboard. # session. This helps the clarity of presentation on tensorboard.
tensor_name = re.sub('%s_[0-9]*/' % TOWER_NAME, '', x.op.name) tensor_name = re.sub('%s_[0-9]*/' % TOWER_NAME, '', x.op.name)
tf.contrib.deprecated.histogram_summary(tensor_name + '/activations', x) tf.histogram_summary(tensor_name + '/activations', x)
tf.contrib.deprecated.scalar_summary(tensor_name + '/sparsity', tf.scalar_summary(tensor_name + '/sparsity',
tf.nn.zero_fraction(x)) tf.nn.zero_fraction(x))
...@@ -317,8 +317,8 @@ def _add_loss_summaries(total_loss): ...@@ -317,8 +317,8 @@ def _add_loss_summaries(total_loss):
for l in losses + [total_loss]: for l in losses + [total_loss]:
# Name each loss as '(raw)' and name the moving average version of the loss # Name each loss as '(raw)' and name the moving average version of the loss
# as the original loss name. # as the original loss name.
tf.contrib.deprecated.scalar_summary(l.op.name + ' (raw)', l) tf.scalar_summary(l.op.name + ' (raw)', l)
tf.contrib.deprecated.scalar_summary(l.op.name, loss_averages.average(l)) tf.scalar_summary(l.op.name, loss_averages.average(l))
return loss_averages_op return loss_averages_op
...@@ -346,7 +346,7 @@ def train(total_loss, global_step): ...@@ -346,7 +346,7 @@ def train(total_loss, global_step):
decay_steps, decay_steps,
LEARNING_RATE_DECAY_FACTOR, LEARNING_RATE_DECAY_FACTOR,
staircase=True) staircase=True)
tf.contrib.deprecated.scalar_summary('learning_rate', lr) tf.scalar_summary('learning_rate', lr)
# Generate moving averages of all losses and associated summaries. # Generate moving averages of all losses and associated summaries.
loss_averages_op = _add_loss_summaries(total_loss) loss_averages_op = _add_loss_summaries(total_loss)
...@@ -361,12 +361,12 @@ def train(total_loss, global_step): ...@@ -361,12 +361,12 @@ def train(total_loss, global_step):
# Add histograms for trainable variables. # Add histograms for trainable variables.
for var in tf.trainable_variables(): for var in tf.trainable_variables():
tf.contrib.deprecated.histogram_summary(var.op.name, var) tf.histogram_summary(var.op.name, var)
# Add histograms for gradients. # Add histograms for gradients.
for grad, var in grads: for grad, var in grads:
if grad is not None: if grad is not None:
tf.contrib.deprecated.histogram_summary(var.op.name + '/gradients', grad) tf.histogram_summary(var.op.name + '/gradients', grad)
# Track the moving averages of all trainable variables. # Track the moving averages of all trainable variables.
variable_averages = tf.train.ExponentialMovingAverage( variable_averages = tf.train.ExponentialMovingAverage(
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment