Commit 09e0cdcc authored by Neal Wu, committed by GitHub

Merge pull request #873 from tensorflow/add-arguments

Updated calls to '..._cross_entropy_with_logits' to add arguments
Parents: fc1c9b1e cc1fb668
@@ -263,9 +263,9 @@ class Word2Vec(object):
     # cross-entropy(logits, labels)
     opts = self._options
     true_xent = tf.nn.sigmoid_cross_entropy_with_logits(
-        true_logits, tf.ones_like(true_logits))
+        labels=tf.ones_like(true_logits), logits=true_logits)
     sampled_xent = tf.nn.sigmoid_cross_entropy_with_logits(
-        sampled_logits, tf.zeros_like(sampled_logits))
+        labels=tf.zeros_like(sampled_logits), logits=sampled_logits)
     # NCE-loss is the sum of the true and noise (sampled words)
     # contributions, averaged over the batch.
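For context, a minimal sketch of the updated call style (written against the TensorFlow 1.x API; the constant inputs and the batch_size value are illustrative stand-ins, not taken from the word2vec code):

import tensorflow as tf

# Illustrative stand-ins for the logits word2vec computes in its forward pass.
true_logits = tf.constant([[2.0, -1.0]])     # logits for true (target) words
sampled_logits = tf.constant([[0.5, 1.5]])   # logits for sampled noise words
batch_size = 1                               # stand-in for opts.batch_size

# TF 1.0 made `labels` and `logits` effectively keyword-only for this op,
# so the old positional call sigmoid_cross_entropy_with_logits(logits, labels)
# raises an error; the named form below is the required usage.
true_xent = tf.nn.sigmoid_cross_entropy_with_logits(
    labels=tf.ones_like(true_logits), logits=true_logits)
sampled_xent = tf.nn.sigmoid_cross_entropy_with_logits(
    labels=tf.zeros_like(sampled_logits), logits=sampled_logits)

# NCE loss: sum of the true and noise (sampled) contributions,
# averaged over the batch.
nce_loss = (tf.reduce_sum(true_xent) +
            tf.reduce_sum(sampled_xent)) / batch_size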
@@ -286,7 +286,7 @@ def loss(logits, labels):
   # Calculate the average cross entropy loss across the batch.
   labels = tf.cast(labels, tf.int64)
   cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
-      logits=logits, labels=labels, name='cross_entropy_per_example')
+      labels=labels, logits=logits, name='cross_entropy_per_example')
   cross_entropy_mean = tf.reduce_mean(cross_entropy, name='cross_entropy')
   tf.add_to_collection('losses', cross_entropy_mean)
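Likewise, a hedged sketch of this loss with the reordered keyword arguments (TensorFlow 1.x API; the logits and labels tensors are illustrative, not taken from the repo):

import tensorflow as tf

logits = tf.constant([[2.0, 0.5, -1.0],
                      [0.1, 1.2, 0.3]])   # shape [batch, num_classes]
labels = tf.constant([0, 1])              # integer class ids, shape [batch]

# Compute per-example cross entropy, passing labels/logits by name so the
# call is robust to the parameter reordering introduced in TF 1.0.
labels = tf.cast(labels, tf.int64)
cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
    labels=labels, logits=logits, name='cross_entropy_per_example')
cross_entropy_mean = tf.reduce_mean(cross_entropy, name='cross_entropy')
tf.add_to_collection('losses', cross_entropy_mean)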