We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 09970c7 · commit e837c61 · Copy full SHA for e837c61
tensorlayer/cost.py
@@ -30,7 +30,7 @@ def cross_entropy(output, target, name="cross_entropy_loss"):
30
- The code is borrowed from: `here <https://en.wikipedia.org/wiki/Cross_entropy>`_.
31
"""
32
if tf.__version__ <= "0.12":
33
- return tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(logits=output, labels=target, name=name))
+ return tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(logits=output, targets=target, name=name))
34
else: # TF 1.0
35
return tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(labels=target, logits=outputs, name=name))
36
0 commit comments