Commit fc1c9b1e authored by Martin Wicke, committed by GitHub

Merge pull request #864 from tensorflow/fix-xent

Fix *_cross_entropy_with_logits calls
parents 4b53df3c f4161b6f
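
Every hunk in this merge makes the same mechanical change: positional calls to the *_cross_entropy_with_logits functions become calls with explicit keyword arguments, so they no longer depend on argument order. A minimal before/after sketch of the pattern (it mirrors the first hunk below; logits and labels stand in for whatever tensors the surrounding model defines):

    # Before: arguments bound by position in the old signature.
    cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
        logits, labels, name='cross_entropy_per_example')

    # After: explicit keyword arguments, robust to the signature change.
    cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
        logits=logits, labels=labels, name='cross_entropy_per_example')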
@@ -341,7 +341,7 @@ def loss_fun(logits, labels):
   # Calculate the cross entropy between labels and predictions
   labels = tf.cast(labels, tf.int64)
   cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
-      logits, labels, name='cross_entropy_per_example')
+      logits=logits, labels=labels, name='cross_entropy_per_example')
   # Calculate the average cross entropy loss across the batch.
   cross_entropy_mean = tf.reduce_mean(cross_entropy, name='cross_entropy')
...
@@ -163,8 +163,8 @@ def cross_entropy_loss(logits, one_hot_labels, label_smoothing=0,
   smooth_positives = 1.0 - label_smoothing
   smooth_negatives = label_smoothing / num_classes
   one_hot_labels = one_hot_labels * smooth_positives + smooth_negatives
-  cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits,
-                                                          one_hot_labels,
+  cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits=logits,
+                                                          labels=one_hot_labels,
                                                           name='xentropy')
   weight = tf.convert_to_tensor(weight,
                                 dtype=logits.dtype.base_dtype,
...
@@ -454,7 +454,7 @@ class VGSLImageModel(object):
       self.labels = tf.slice(self.labels, [0, 0], [-1, 1])
       self.labels = tf.reshape(self.labels, [-1])
       cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
-          logits, self.labels, name='xent')
+          logits=logits, labels=self.labels, name='xent')
     else:
       # TODO(rays) Labels need an extra dimension for logistic, so different
       # padding functions are needed, as well as a different loss function.
...
@@ -123,7 +123,7 @@ y_logits = tf.matmul(h_fc1_drop, W_fc2) + b_fc2
 # %% Define loss/eval/training functions
 cross_entropy = tf.reduce_mean(
-    tf.nn.softmax_cross_entropy_with_logits(y_logits, y))
+    tf.nn.softmax_cross_entropy_with_logits(logits=y_logits, labels=y))
 opt = tf.train.AdamOptimizer()
 optimizer = opt.minimize(cross_entropy)
 grads = opt.compute_gradients(cross_entropy, [b_fc_loc2])
...
@@ -286,7 +286,7 @@ def loss(logits, labels):
   # Calculate the average cross entropy loss across the batch.
   labels = tf.cast(labels, tf.int64)
   cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
-      logits, labels, name='cross_entropy_per_example')
+      logits=logits, labels=labels, name='cross_entropy_per_example')
   cross_entropy_mean = tf.reduce_mean(cross_entropy, name='cross_entropy')
   tf.add_to_collection('losses', cross_entropy_mean)
...
@@ -228,7 +228,7 @@ def main(_):
   # Training computation: logits + cross-entropy loss.
   logits = model(train_data_node, True)
   loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(
-      logits, train_labels_node))
+      labels=train_labels_node, logits=logits))
   # L2 regularization for the fully connected parameters.
   regularizers = (tf.nn.l2_loss(fc1_weights) + tf.nn.l2_loss(fc1_biases) +