Unverified Commit f3be93a7 authored by Reed, committed by GitHub

Add flags for adam hyperparameters (#5428)

parent f96099da
@@ -333,6 +333,9 @@ def run_ncf(_):
           "tpu": FLAGS.tpu,
           "tpu_zone": FLAGS.tpu_zone,
           "tpu_gcp_project": FLAGS.tpu_gcp_project,
+          "beta1": FLAGS.beta1,
+          "beta2": FLAGS.beta2,
+          "epsilon": FLAGS.epsilon,
       }, batch_size=flags.FLAGS.batch_size, eval_batch_size=eval_batch_size)
   # Create hooks that log information about the training and metric values
@@ -483,6 +486,19 @@ def define_ncf_flags():
       name="learning_rate", default=0.001,
       help=flags_core.help_wrap("The learning rate."))
 
+  flags.DEFINE_float(
+      name="beta1", default=0.9,
+      help=flags_core.help_wrap("beta1 hyperparameter for the Adam optimizer."))
+
+  flags.DEFINE_float(
+      name="beta2", default=0.999,
+      help=flags_core.help_wrap("beta2 hyperparameter for the Adam optimizer."))
+
+  flags.DEFINE_float(
+      name="epsilon", default=1e-8,
+      help=flags_core.help_wrap("epsilon hyperparameter for the Adam "
+                                "optimizer."))
+
   flags.DEFINE_float(
       name="hr_threshold", default=None,
       help=flags_core.help_wrap(
......
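Note (illustrative, not part of the commit): the two hunks above define three new command-line flags and forward their parsed values into the model's params dict. The sketch below shows that flow in a minimal, self-contained form, assuming only absl-py; plain help strings stand in for the repo's flags_core.help_wrap, and the defaults match tf.train.AdamOptimizer's own defaults (beta1=0.9, beta2=0.999, epsilon=1e-8).

    # sketch_adam_flags.py -- illustrative sketch only, not code from the repo
    from absl import app, flags

    FLAGS = flags.FLAGS

    # Same flag names and defaults as the commit; help_wrap is omitted here.
    flags.DEFINE_float(name="beta1", default=0.9,
                       help="beta1 hyperparameter for the Adam optimizer.")
    flags.DEFINE_float(name="beta2", default=0.999,
                       help="beta2 hyperparameter for the Adam optimizer.")
    flags.DEFINE_float(name="epsilon", default=1e-8,
                       help="epsilon hyperparameter for the Adam optimizer.")


    def main(_):
      # Parsed flag values are forwarded into the model params, mirroring the
      # run_ncf hunk above.
      params = {"beta1": FLAGS.beta1, "beta2": FLAGS.beta2,
                "epsilon": FLAGS.epsilon}
      print(params)


    if __name__ == "__main__":
      app.run(main)  # e.g. python sketch_adam_flags.py --beta1=0.85 --epsilon=1e-7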
@@ -65,7 +65,9 @@ def neumf_model_fn(features, labels, mode, params):
   elif mode == tf.estimator.ModeKeys.TRAIN:
     labels = tf.cast(labels, tf.int32)
-    optimizer = tf.train.AdamOptimizer(learning_rate=params["learning_rate"])
+    optimizer = tf.train.AdamOptimizer(
+        learning_rate=params["learning_rate"], beta1=params["beta1"],
+        beta2=params["beta2"], epsilon=params["epsilon"])
     if params["use_tpu"]:
       optimizer = tf.contrib.tpu.CrossShardOptimizer(optimizer)
......
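Note (illustrative, not part of the commit): the hunk above simply passes the new params into tf.train.AdamOptimizer (TF 1.x), whose constructor defaults are beta1=0.9, beta2=0.999, epsilon=1e-8. The make_optimizer helper below is hypothetical, added only to show that wiring outside the Estimator model_fn; the repo builds the optimizer inline as shown above.

    # Illustrative sketch; assumes TensorFlow 1.x (tf.train.AdamOptimizer).
    import tensorflow as tf


    def make_optimizer(params):
      """Builds Adam from the hyperparameters wired through by this commit.

      make_optimizer is a hypothetical helper for illustration only.
      """
      return tf.train.AdamOptimizer(
          learning_rate=params.get("learning_rate", 0.001),
          beta1=params.get("beta1", 0.9),
          beta2=params.get("beta2", 0.999),
          epsilon=params.get("epsilon", 1e-8))


    optimizer = make_optimizer({"learning_rate": 0.001, "beta1": 0.9,
                                "beta2": 0.999, "epsilon": 1e-8})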