Commit a2abe19

update mnist simple
1 parent 44a2f7c commit a2abe19

File tree

README.md
example/tutorial_mnist_simple.py

2 files changed: +5 / -6 lines


README.md

Lines changed: 2 additions & 2 deletions
@@ -72,7 +72,7 @@ x = tf.placeholder(tf.float32, shape=[None, 784], name='x')
 y_ = tf.placeholder(tf.int64, shape=[None, ], name='y_')
 
 # Define the neural network structure
-network = tl.layers.InputLayer(x, name='input_layer')
+network = tl.layers.InputLayer(x, name='input')
 network = tl.layers.DropoutLayer(network, keep=0.8, name='drop1')
 network = tl.layers.DenseLayer(network, n_units=800, act = tf.nn.relu, name='relu1')
 network = tl.layers.DropoutLayer(network, keep=0.5, name='drop2')
@@ -82,7 +82,7 @@ network = tl.layers.DropoutLayer(network, keep=0.5, name='drop3')
 # The softmax is implemented internally in tl.cost.cross_entropy(y, y_) to
 # speed up computation, so we use identity here.
 # see tf.nn.sparse_softmax_cross_entropy_with_logits()
-network = tl.layers.DenseLayer(network, n_units=10, act = tf.identity, name='output_layer')
+network = tl.layers.DenseLayer(network, n_units=10, act=tf.identity, name='output')
 
 # Define cost function and metric.
 y = network.outputs
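
For reference, a minimal self-contained sketch of how the renamed layers fit together after this change, assuming the TensorFlow 1.x and TensorLayer 1.x APIs used by the tutorial; the second hidden block between drop2 and drop3 is not visible in the hunks above and is filled in as an assumption:

    import tensorflow as tf
    import tensorlayer as tl

    x = tf.placeholder(tf.float32, shape=[None, 784], name='x')
    y_ = tf.placeholder(tf.int64, shape=[None, ], name='y_')

    # Layer names now use the short form ('input', 'output').
    network = tl.layers.InputLayer(x, name='input')
    network = tl.layers.DropoutLayer(network, keep=0.8, name='drop1')
    network = tl.layers.DenseLayer(network, n_units=800, act=tf.nn.relu, name='relu1')
    network = tl.layers.DropoutLayer(network, keep=0.5, name='drop2')
    network = tl.layers.DenseLayer(network, n_units=800, act=tf.nn.relu, name='relu2')  # assumed from the tutorial
    network = tl.layers.DropoutLayer(network, keep=0.5, name='drop3')
    # Identity activation: the softmax is applied inside tl.cost.cross_entropy.
    network = tl.layers.DenseLayer(network, n_units=10, act=tf.identity, name='output')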

example/tutorial_mnist_simple.py

Lines changed: 3 additions & 4 deletions
@@ -16,7 +16,7 @@
 y_ = tf.placeholder(tf.int64, shape=[None, ], name='y_')
 
 # define the network
-network = tl.layers.InputLayer(x, name='input_layer')
+network = tl.layers.InputLayer(x, name='input')
 network = tl.layers.DropoutLayer(network, keep=0.8, name='drop1')
 network = tl.layers.DenseLayer(network, n_units=800,
                                 act = tf.nn.relu, name='relu1')
@@ -28,12 +28,11 @@
 # speed up computation, so we use identity here.
 # see tf.nn.sparse_softmax_cross_entropy_with_logits()
 network = tl.layers.DenseLayer(network, n_units=10,
-                                act = tf.identity,
-                                name='output_layer')
+                                act=tf.identity, name='output')
 
 # define cost function and metric.
 y = network.outputs
-cost = tl.cost.cross_entropy(y, y_, name='xentropy')
+cost = tl.cost.cross_entropy(y, y_, name='cost')
 correct_prediction = tf.equal(tf.argmax(y, 1), y_)
 acc = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
 y_op = tf.argmax(tf.nn.softmax(y), 1)
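
A note on the act=tf.identity / name='cost' pair: tl.cost.cross_entropy expects raw logits because it defers the softmax to tf.nn.sparse_softmax_cross_entropy_with_logits. A rough raw-TensorFlow sketch of the same cost and metrics follows; the exact reduction inside TensorLayer may differ, so treat it as an approximation rather than the library's implementation:

    y = network.outputs  # unscaled logits, shape [None, 10]

    # Roughly what tl.cost.cross_entropy(y, y_, name='cost') computes (assumption):
    cost = tf.reduce_mean(
        tf.nn.sparse_softmax_cross_entropy_with_logits(labels=y_, logits=y),
        name='cost')

    # Metrics and predictions, unchanged by this commit:
    correct_prediction = tf.equal(tf.argmax(y, 1), y_)
    acc = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
    y_op = tf.argmax(tf.nn.softmax(y), 1)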
