Commit d9bd47b

Merge pull request #90 from YinongLong/master
update cross entropy by @YinongLong
2 parents f5316d7 + 7cc5f2f commit d9bd47b

2 files changed: +2 -2 lines changed

example/tutorial_mnist_simple.py

Lines changed: 1 addition & 1 deletion
@@ -33,7 +33,7 @@
 # define cost function and metric.
 y = network.outputs
-cost = tl.cost.cross_entropy(y, y_)
+cost = tl.cost.cross_entropy(y, y_, name='xentropy')
 correct_prediction = tf.equal(tf.argmax(y, 1), y_)
 acc = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
 y_op = tf.argmax(tf.nn.softmax(y), 1)
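The example's only change is the explicit op name passed to tl.cost.cross_entropy. For context, the cost defined here is what the tutorial trains against; a minimal sketch of how such a cost is typically minimized in TF 1.x follows, where the optimizer choice and learning rate are assumptions and not taken from this diff:

# Assumed continuation of the tutorial (not part of this diff): train on the named cost.
train_op = tf.train.AdamOptimizer(learning_rate=0.0001).minimize(cost)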

tensorlayer/cost.py

Lines changed: 1 addition & 1 deletion
@@ -36,7 +36,7 @@ def cross_entropy(output, target, name=None):
     if tf.__version__ <= "0.12":
         return tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(logits=output, targets=target, name=name))
     else: # TF 1.0
-        return tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(labels=target, logits=outputs, name=name))
+        return tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(labels=target, logits=output, name=name))
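Before this commit the TF 1.0 branch passed logits=outputs, a name that does not match the function's output parameter, so that branch would fail with a NameError when called (unless a global outputs happened to exist); the fix simply uses the parameter. Reassembled from the hunk above (indentation, wrapping, and comments are mine; the docstring is omitted), the helper now reads:

import tensorflow as tf

def cross_entropy(output, target, name=None):
    # Old TF API (<= 0.12) takes the integer class labels via the `targets` keyword.
    if tf.__version__ <= "0.12":
        return tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(
            logits=output, targets=target, name=name))
    # TF 1.0+ renames the keyword to `labels`; `logits` must be the `output` parameter.
    else:
        return tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(
            labels=target, logits=output, name=name))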