"docs/vscode:/vscode.git/clone" did not exist on "83c104b18823ea52dba4e90e6a3ca6c54ca037a4"
Commit 37d31ec1 authored by Neal Wu, committed by GitHub

Merge pull request #783 from dweekly/master

Update sum_of_squares (TF 0.10) to mean_squared_error (TF 0.12)
Parents: 8a22c8ae, 48f5b4da
@@ -232,7 +232,7 @@
 },
 "outputs": [],
 "source": [
-"# The following snippet trains the regression model using a sum_of_squares loss.\n",
+"# The following snippet trains the regression model using a mean_squared_error loss.\n",
 "ckpt_dir = '/tmp/regression_model/'\n",
 "\n",
 "with tf.Graph().as_default():\n",
@@ -244,7 +244,7 @@
 " predictions, nodes = regression_model(inputs, is_training=True)\n",
 "\n",
 " # Add the loss function to the graph.\n",
-" loss = slim.losses.sum_of_squares(predictions, targets)\n",
+" loss = tf.losses.mean_squared_error(labels=targets, predictions=predictions)\n",
 " \n",
 " # The total loss is the user's loss plus any regularization losses.\n",
 " total_loss = slim.losses.get_total_loss()\n",
@@ -289,12 +289,12 @@
 " predictions, end_points = regression_model(inputs, is_training=True)\n",
 "\n",
 " # Add multiple loss nodes.\n",
-" sum_of_squares_loss = slim.losses.sum_of_squares(predictions, targets)\n",
+" mean_squared_error_loss = tf.losses.mean_squared_error(labels=targets, predictions=predictions)\n",
 " absolute_difference_loss = slim.losses.absolute_difference(predictions, targets)\n",
 "\n",
 " # The following two ways to compute the total loss are equivalent\n",
 " regularization_loss = tf.add_n(slim.losses.get_regularization_losses())\n",
-" total_loss1 = sum_of_squares_loss + absolute_difference_loss + regularization_loss\n",
+" total_loss1 = mean_squared_error_loss + absolute_difference_loss + regularization_loss\n",
 "\n",
 " # Regularization Loss is included in the total loss by default.\n",
 " # This is good for training, but not for testing.\n",
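For reference, the renamed call can be exercised on its own outside the notebook. The following is a minimal, hypothetical sketch (not part of this commit), assuming TF 0.12+ in graph mode, with made-up tensors standing in for the model's predictions and targets:

import tensorflow as tf

# Made-up example tensors; in the notebook these come from regression_model().
predictions = tf.constant([[1.0], [2.0], [3.0]])
targets = tf.constant([[1.5], [2.0], [2.5]])

# Old (TF 0.10): loss = slim.losses.sum_of_squares(predictions, targets)
# New (TF 0.12): labels is the first argument, so passing keywords avoids
# swapping the operands by accident.
loss = tf.losses.mean_squared_error(labels=targets, predictions=predictions)

# tf.losses.* ops are still registered in the GraphKeys.LOSSES collection,
# so slim.losses.get_total_loss() in the notebook continues to pick them up.
with tf.Session() as sess:
    print(sess.run(loss))  # (0.25 + 0.0 + 0.25) / 3 ≈ 0.1667

The keyword form mirrors the diff above and makes the change from the old positional (predictions, targets) order explicit.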