Commit bf321a6f authored by Neal Wu

Logits and labels were out of order when computing loss

parent 9dd2c618
@@ -470,10 +470,11 @@ def main(_):
     #############################
     if 'AuxLogits' in end_points:
       tf.losses.softmax_cross_entropy(
-          end_points['AuxLogits'], labels,
+          logits=end_points['AuxLogits'], onehot_labels=labels,
           label_smoothing=FLAGS.label_smoothing, weights=0.4, scope='aux_loss')
     tf.losses.softmax_cross_entropy(
-        logits, labels, label_smoothing=FLAGS.label_smoothing, weights=1.0)
+        logits=logits, onehot_labels=labels,
+        label_smoothing=FLAGS.label_smoothing, weights=1.0)
     return end_points
 
   # Gather initial summaries.
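
For context, tf.losses.softmax_cross_entropy in TensorFlow 1.x takes onehot_labels as its first positional argument and logits as its second, so the old positional calls above silently swapped the two tensors. The sketch below is a standalone illustration of the keyword-argument style this commit switches to, not part of the repository's training script; the batch size, class count, and random tensors are made up for the example.

import tensorflow as tf  # assumes TensorFlow 1.x, the API used in this diff

num_classes = 10
# Stand-in tensors for the model's outputs and the one-hot ground-truth labels.
logits = tf.random_normal([32, num_classes])
labels = tf.one_hot(
    tf.random_uniform([32], maxval=num_classes, dtype=tf.int32), num_classes)

# Naming the arguments avoids the ordering bug fixed above:
# onehot_labels comes first in the signature, logits second.
loss = tf.losses.softmax_cross_entropy(
    onehot_labels=labels, logits=logits,
    label_smoothing=0.1, weights=1.0)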