"pytorch/vscode:/vscode.git/clone" did not exist on "eb1525ec06813a673d83c55c7ad5c55065e4aed2"
Commit c136af63 authored by Matt Rickard, committed by Neal Wu

Remove the rmsprop_momentum flag; use --momentum instead

The description of the `momentum` flag states that it is `The momentum
for the MomentumOptimizer and RMSPropOptimizer`; however, the flag was
not actually used by the RMSPropOptimizer. Instead, a separate
`rmsprop_momentum` flag was used. This change deletes that flag for
simplicity; it was not referenced anywhere else in the repo.
parent d6b78425
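
For context, the sketch below shows how the single --momentum flag drives both optimizer branches after this change. It is a minimal, self-contained illustration using the TF 1.x `tf.app.flags` / `tf.train` APIs seen in the diff; the `optimizer` flag branches, the ValueError fallback, the `opt_epsilon` default, and the `main()` harness are assumptions for the sketch, not the repo's exact code.

```python
# Minimal sketch (TF 1.x): one --momentum flag shared by the
# MomentumOptimizer and RMSPropOptimizer branches. The 'momentum' and
# 'rmsprop_decay' defaults match the diff; everything else is illustrative.
import tensorflow as tf

FLAGS = tf.app.flags.FLAGS

tf.app.flags.DEFINE_string(
    'optimizer', 'rmsprop', 'One of "momentum", "rmsprop", or "sgd".')
tf.app.flags.DEFINE_float(
    'momentum', 0.9,
    'The momentum for the MomentumOptimizer and RMSPropOptimizer.')
tf.app.flags.DEFINE_float('rmsprop_decay', 0.9, 'Decay term for RMSProp.')
tf.app.flags.DEFINE_float('opt_epsilon', 1.0, 'Epsilon term for the optimizer.')


def _configure_optimizer(learning_rate):
  """Builds an optimizer from flags; both branches read FLAGS.momentum."""
  if FLAGS.optimizer == 'momentum':
    return tf.train.MomentumOptimizer(learning_rate, momentum=FLAGS.momentum)
  elif FLAGS.optimizer == 'rmsprop':
    return tf.train.RMSPropOptimizer(
        learning_rate,
        decay=FLAGS.rmsprop_decay,
        momentum=FLAGS.momentum,  # previously FLAGS.rmsprop_momentum
        epsilon=FLAGS.opt_epsilon)
  elif FLAGS.optimizer == 'sgd':
    return tf.train.GradientDescentOptimizer(learning_rate)
  raise ValueError('Unknown optimizer: %s' % FLAGS.optimizer)


def main(_):
  optimizer = _configure_optimizer(learning_rate=0.01)
  print(type(optimizer).__name__, 'momentum =', FLAGS.momentum)


if __name__ == '__main__':
  tf.app.run()  # parses the flags above, then calls main
```

With the duplicate flag gone, `--momentum=0.5` now affects RMSProp as its help string promises, instead of silently applying only to the MomentumOptimizer.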
@@ -118,8 +118,6 @@ tf.app.flags.DEFINE_float(
     'momentum', 0.9,
     'The momentum for the MomentumOptimizer and RMSPropOptimizer.')
 
-tf.app.flags.DEFINE_float('rmsprop_momentum', 0.9, 'Momentum.')
-
 tf.app.flags.DEFINE_float('rmsprop_decay', 0.9, 'Decay term for RMSProp.')
 
 #######################
@@ -304,7 +302,7 @@ def _configure_optimizer(learning_rate):
     optimizer = tf.train.RMSPropOptimizer(
         learning_rate,
         decay=FLAGS.rmsprop_decay,
-        momentum=FLAGS.rmsprop_momentum,
+        momentum=FLAGS.momentum,
         epsilon=FLAGS.opt_epsilon)
   elif FLAGS.optimizer == 'sgd':
     optimizer = tf.train.GradientDescentOptimizer(learning_rate)
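
As a quick, hypothetical sanity check (not part of the commit), one could confirm that the RMSProp branch now honors --momentum. This reuses `_configure_optimizer` from the sketch above and peeks at `RMSPropOptimizer`'s private `_momentum` attribute, which is a TF 1.x implementation detail inspected here only for illustration.

```python
# Hypothetical check, reusing FLAGS and _configure_optimizer from the
# sketch above. Before this commit, --momentum had no effect on the
# RMSProp branch; after it, the value flows through.
FLAGS.optimizer = 'rmsprop'
FLAGS.momentum = 0.5

opt = _configure_optimizer(learning_rate=0.01)
# _momentum is private to tf.train.RMSPropOptimizer (TF 1.x).
assert opt._momentum == 0.5, 'RMSProp should pick up --momentum'
```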