@@ -72,7 +72,7 @@ x = tf.placeholder(tf.float32, shape=[None, 784], name='x')
 y_ = tf.placeholder(tf.int64, shape=[None, ], name='y_')

 # Define the neural network structure
-network = tl.layers.InputLayer(x, name='input_layer')
+network = tl.layers.InputLayer(x, name='input')
 network = tl.layers.DropoutLayer(network, keep=0.8, name='drop1')
 network = tl.layers.DenseLayer(network, n_units=800, act=tf.nn.relu, name='relu1')
 network = tl.layers.DropoutLayer(network, keep=0.5, name='drop2')
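The DropoutLayer calls above register their keep probabilities (0.8 and 0.5) as placeholders collected in network.all_drop, so dropout can be switched on for training and off for evaluation through the feed dict. A minimal sketch of that pattern, assuming the TensorLayer 1.x session-style setup this tutorial uses; sess, train_op, acc, X_batch, y_batch, X_test, and y_test are illustrative names, not part of this diff:

    # Training step: feed the real keep probabilities so dropout is active.
    feed_dict = {x: X_batch, y_: y_batch}
    feed_dict.update(network.all_drop)  # enable the noise layers
    sess.run(train_op, feed_dict=feed_dict)

    # Evaluation: tl.utils.dict_to_one() maps every keep probability to 1.0,
    # which disables dropout for the forward pass.
    dp_dict = tl.utils.dict_to_one(network.all_drop)
    feed_dict = {x: X_test, y_: y_test}
    feed_dict.update(dp_dict)
    sess.run(acc, feed_dict=feed_dict)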
@@ -82,7 +82,7 @@ network = tl.layers.DropoutLayer(network, keep=0.5, name='drop3')
 # The softmax is implemented internally in tl.cost.cross_entropy(y, y_) to
 # speed up computation, so we use identity here.
 # see tf.nn.sparse_softmax_cross_entropy_with_logits()
-network = tl.layers.DenseLayer(network, n_units=10, act=tf.identity, name='output_layer')
+network = tl.layers.DenseLayer(network, n_units=10, act=tf.identity, name='output')

 # Define cost function and metric.
 y = network.outputs
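As the comment in this hunk notes, the output DenseLayer uses act=tf.identity so that y = network.outputs holds raw logits and the softmax can be fused into the loss. A minimal sketch of the cost and metric that typically follow, building on x, y_, and y from above (hedged: cost, correct_prediction, and acc are illustrative names, and the exact tl.cost.cross_entropy signature varies across TensorLayer versions):

    # tl.cost.cross_entropy wraps tf.nn.sparse_softmax_cross_entropy_with_logits,
    # which fuses softmax + log-loss for speed and numerical stability.
    cost = tl.cost.cross_entropy(y, y_)

    # Equivalent plain-TensorFlow form of the same loss:
    cost_tf = tf.reduce_mean(
        tf.nn.sparse_softmax_cross_entropy_with_logits(labels=y_, logits=y))

    # Accuracy: arg-max over the 10 logits against the int64 labels y_.
    correct_prediction = tf.equal(tf.argmax(y, 1), y_)
    acc = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))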