Unverified Commit 71c196c1 authored by Mark Daoust, committed by GitHub

Merge pull request #4837 from mdanatg/master

 Update the training schedule for better convergence
parents 6adf454e e6f7756d
@@ -740,7 +740,7 @@
    "@autograph.convert(recursive=True)\n",
    "def train(train_ds, test_ds, hp):\n",
    "  m = mlp_model((28 * 28,))\n",
-    "  opt = tf.train.MomentumOptimizer(hp.learning_rate, 0.9)\n",
+    "  opt = tf.train.AdamOptimizer(hp.learning_rate)\n",
    "  \n",
    "  # We'd like to save our losses to a list. In order for AutoGraph\n",
    "  # to convert these lists into their graph equivalent,\n",
@@ -802,7 +802,7 @@
   "source": [
    "with tf.Graph().as_default() as g:\n",
    "  hp = tf.contrib.training.HParams(\n",
-    "      learning_rate=0.05,\n",
+    "      learning_rate=0.005,\n",
    "      max_steps=500,\n",
    "  )\n",
    "  train_ds = setup_mnist_data(True, 50)\n",
@@ -837,4 +837,4 @@
   "outputs": []
  }
 ]
-}
+}
\ No newline at end of file
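In short, the training-schedule change swaps tf.train.MomentumOptimizer(hp.learning_rate, 0.9) with learning_rate=0.05 for tf.train.AdamOptimizer with learning_rate=0.005. A minimal sketch of the updated setup is below, assuming the TF 1.x API this notebook targets; make_train_op is a hypothetical helper for illustration only (the notebook's real train function also builds the model, datasets, and loop, which are omitted here).

import tensorflow as tf

def make_train_op(loss, hp):
  # Hypothetical helper illustrating the optimizer change in this commit.
  # Before: opt = tf.train.MomentumOptimizer(hp.learning_rate, 0.9)  # learning_rate was 0.05
  # After:  Adam with the smaller learning rate set in HParams below.
  opt = tf.train.AdamOptimizer(hp.learning_rate)
  return opt.minimize(loss)

# Hyperparameters matching the updated notebook cell.
hp = tf.contrib.training.HParams(
    learning_rate=0.005,
    max_steps=500,
)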