ModelZoo / ResNet50_tensorflow · Commits

Commit fc1c9b1e, authored Jan 08, 2017 by Martin Wicke, committed by GitHub on Jan 08, 2017

Merge pull request #864 from tensorflow/fix-xent

Fix *_cross_entropy_with_logits calls

Parents: 4b53df3c, f4161b6f
Changes: 6 changed files, with 7 additions and 7 deletions (+7 −7)

  differential_privacy/multiple_teachers/deep_cnn.py   +1 −1
  inception/inception/slim/losses.py                   +2 −2
  street/python/vgsl_model.py                          +1 −1
  transformer/cluttered_mnist.py                       +1 −1
  tutorials/image/cifar10/cifar10.py                   +1 −1
  tutorials/image/mnist/convolutional.py               +1 −1
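All seven changed lines are the same migration: TensorFlow 1.0 swapped the argument order of the *_cross_entropy_with_logits functions (labels now come before logits) and requires them to be passed as keyword arguments, so the old positional calls break. A minimal before/after sketch of the convention; the tensors here are hypothetical and not part of this commit:

    import tensorflow as tf

    # Hypothetical tensors, only to illustrate the call convention.
    logits = tf.random_normal([32, 10])                          # unnormalized class scores
    labels = tf.random_uniform([32], maxval=10, dtype=tf.int64)  # integer class ids

    # Old (pre-1.0) positional form, as removed by this commit:
    #   tf.nn.sparse_softmax_cross_entropy_with_logits(logits, labels, name=...)
    # New (TF 1.0) keyword form, as added by this commit:
    cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
        logits=logits, labels=labels, name='cross_entropy_per_example')
    loss = tf.reduce_mean(cross_entropy, name='cross_entropy')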
differential_privacy/multiple_teachers/deep_cnn.py

@@ -341,7 +341,7 @@ def loss_fun(logits, labels):
   # Calculate the cross entropy between labels and predictions
   labels = tf.cast(labels, tf.int64)
   cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
-      logits, labels, name='cross_entropy_per_example')
+      logits=logits, labels=labels, name='cross_entropy_per_example')
   # Calculate the average cross entropy loss across the batch.
   cross_entropy_mean = tf.reduce_mean(cross_entropy, name='cross_entropy')
inception/inception/slim/losses.py

@@ -163,8 +163,8 @@ def cross_entropy_loss(logits, one_hot_labels, label_smoothing=0,
     smooth_positives = 1.0 - label_smoothing
     smooth_negatives = label_smoothing / num_classes
     one_hot_labels = one_hot_labels * smooth_positives + smooth_negatives
-  cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits,
-                                                          one_hot_labels,
+  cross_entropy = tf.nn.softmax_cross_entropy_with_logits(
+      logits=logits, labels=one_hot_labels,
                                                           name='xentropy')
   weight = tf.convert_to_tensor(weight, dtype=logits.dtype.base_dtype,
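The context lines in this hunk implement label smoothing: each one-hot target is scaled by 1.0 - label_smoothing and every class receives label_smoothing / num_classes, so the target distribution still sums to 1. A small numeric sketch of that arithmetic (my illustration, not code from the repository):

    import numpy as np

    label_smoothing = 0.1
    num_classes = 4
    one_hot = np.array([0., 0., 1., 0.])

    smooth_positives = 1.0 - label_smoothing          # 0.9
    smooth_negatives = label_smoothing / num_classes  # 0.025
    smoothed = one_hot * smooth_positives + smooth_negatives
    # -> [0.025, 0.025, 0.925, 0.025], which still sums to 1.0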
street/python/vgsl_model.py

@@ -454,7 +454,7 @@ class VGSLImageModel(object):
       self.labels = tf.slice(self.labels, [0, 0], [-1, 1])
       self.labels = tf.reshape(self.labels, [-1])
       cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
-          logits, self.labels, name='xent')
+          logits=logits, labels=self.labels, name='xent')
     else:
       # TODO(rays) Labels need an extra dimension for logistic, so different
       # padding functions are needed, as well as a different loss function.
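The two label ops in this hunk keep only the first column of a 2-D label tensor and flatten it to rank 1, which is the shape sparse_softmax_cross_entropy_with_logits expects. The same transformation in NumPy terms, with hypothetical values:

    import numpy as np

    labels = np.array([[3, 9], [7, 9], [1, 9]])  # shape [batch, 2]
    first_col = labels[:, 0:1]                   # tf.slice(labels, [0, 0], [-1, 1]) -> shape [3, 1]
    flat = first_col.reshape(-1)                 # tf.reshape(labels, [-1]) -> [3, 7, 1]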
transformer/cluttered_mnist.py

@@ -123,7 +123,7 @@ y_logits = tf.matmul(h_fc1_drop, W_fc2) + b_fc2
 # %% Define loss/eval/training functions
 cross_entropy = tf.reduce_mean(
-    tf.nn.softmax_cross_entropy_with_logits(y_logits, y))
+    tf.nn.softmax_cross_entropy_with_logits(logits=y_logits, targets=y))
 opt = tf.train.AdamOptimizer()
 optimizer = opt.minimize(cross_entropy)
 grads = opt.compute_gradients(cross_entropy, [b_fc_loc2])
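The context lines here call both minimize and compute_gradients on the same loss; the latter restricts gradient computation to an explicit variable list rather than all trainable variables. A self-contained sketch of that pattern, with hypothetical variables:

    import tensorflow as tf

    b_fc_loc2 = tf.Variable(tf.zeros([6]))
    other_var = tf.Variable(tf.ones([6]))
    cross_entropy = tf.reduce_sum(tf.square(other_var)) + tf.reduce_sum(tf.square(b_fc_loc2))

    opt = tf.train.AdamOptimizer()
    optimizer = opt.minimize(cross_entropy)                    # update op over all trainable variables
    grads = opt.compute_gradients(cross_entropy, [b_fc_loc2])  # [(gradient, variable)] for one variable only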
tutorials/image/cifar10/cifar10.py

@@ -286,7 +286,7 @@ def loss(logits, labels):
   # Calculate the average cross entropy loss across the batch.
   labels = tf.cast(labels, tf.int64)
   cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
-      logits, labels, name='cross_entropy_per_example')
+      logits=logits, labels=labels, name='cross_entropy_per_example')
   cross_entropy_mean = tf.reduce_mean(cross_entropy, name='cross_entropy')
   tf.add_to_collection('losses', cross_entropy_mean)
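Around this hunk, cifar10.py accumulates loss terms in a graph collection named 'losses' and later sums the collection into the total training loss. A sketch of that pattern; the stand-in constant is mine, not code from the file:

    import tensorflow as tf

    cross_entropy_mean = tf.constant(0.3)  # stand-in for the computed mean cross entropy
    tf.add_to_collection('losses', cross_entropy_mean)
    # Weight-decay terms are added to the same collection elsewhere;
    # the total loss is the sum over the whole collection:
    total_loss = tf.add_n(tf.get_collection('losses'), name='total_loss')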
tutorials/image/mnist/convolutional.py

@@ -228,7 +228,7 @@ def main(_):
   # Training computation: logits + cross-entropy loss.
   logits = model(train_data_node, True)
   loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(
-      logits, train_labels_node))
+      labels=train_labels_node, logits=logits))
   # L2 regularization for the fully connected parameters.
   regularizers = (tf.nn.l2_loss(fc1_weights) + tf.nn.l2_loss(fc1_biases) +
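The regularizer lines in this hunk sum tf.nn.l2_loss over the fully connected parameters, and the result is added to the data loss scaled by a small weight-decay factor. A sketch of that pattern under assumed shapes and an illustrative factor:

    import tensorflow as tf

    # Hypothetical fully connected parameters.
    fc1_weights = tf.Variable(tf.truncated_normal([512, 10], stddev=0.1))
    fc1_biases = tf.Variable(tf.zeros([10]))

    regularizers = tf.nn.l2_loss(fc1_weights) + tf.nn.l2_loss(fc1_biases)
    data_loss = tf.constant(1.0)             # stand-in for the cross-entropy term
    loss = data_loss + 5e-4 * regularizers   # weight-decay factor is illustrative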