Commit 94d20f7

Remove tl.layers.initialize_global_variables(sess) (#931)

* update sampling layers
* update zoom
* fix zoom bug
* fix typo
* fix affine_transform_cv2 x and y bug
* fix crop bug when the crop size equals the image size
* fix typo in file docs
* fix instance norm bug
* fix docs
* update examples, init variables
* update changelog

1 parent 3405b11 commit 94d20f7

31 files changed: +40 / -41 lines
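The change itself is mechanical: every call to the removed helper becomes a plain TensorFlow 1.x initializer run. A minimal sketch of the migration pattern, assuming TF 1.x graph mode and TensorLayer 1.x (the placeholder shape and layer names below are illustrative, not taken from this diff):

import tensorflow as tf
import tensorlayer as tl

sess = tf.InteractiveSession()

# toy graph (hypothetical names/shapes, for illustration only)
x = tf.placeholder(tf.float32, shape=[None, 784], name='x')
network = tl.layers.InputLayer(x, name='input')
network = tl.layers.DenseLayer(network, n_units=800, act=tf.nn.relu, name='relu1')

# before this commit:
#     tl.layers.initialize_global_variables(sess)
# after this commit, call TensorFlow directly:
sess.run(tf.global_variables_initializer())

Both forms initialize every global variable in the default graph; the commit simply drops TensorLayer's thin wrapper in favor of the underlying TensorFlow call.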

CHANGELOG.md

Lines changed: 2 additions & 0 deletions

@@ -72,6 +72,7 @@ To release a new version, please update the changelog as followed:
 ### Added

 ### Changed
+- remove `tl.layers.initialize_global_variables(sess)` (PR #931)

 ### Dependencies Update
 - nltk>=3.3,<3.4 => nltk>=3.3,<3.5 (PR #892)
@@ -87,6 +88,7 @@ To release a new version, please update the changelog as followed:
 ### Security

 ### Contributors
+@zsdonghao: #931

 ## [1.11.1] - 2018-11-15

docs/modules/layers.rst

Lines changed: 2 additions & 2 deletions

@@ -39,7 +39,7 @@ All TensorLayer layers have a number of properties in common:

 All TensorLayer layers have a number of methods in common:

-- ``layer.print_params()`` : print network variable information in order (after ``tl.layers.initialize_global_variables(sess)``). alternatively, print all variables by ``tl.layers.print_all_variables()``.
+- ``layer.print_params()`` : print network variable information in order (after ``sess.run(tf.global_variables_initializer())``). alternatively, print all variables by ``tl.layers.print_all_variables()``.
 - ``layer.print_layers()`` : print network layer information in order.
 - ``layer.count_params()`` : print the number of parameters in the network.

@@ -89,7 +89,7 @@ To count the number of parameters in a network, run ``network.count_params()``.
 train_op = tf.train.AdamOptimizer(learning_rate, beta1=0.9, beta2=0.999,
                                   epsilon=1e-08, use_locking=False).minimize(cost, var_list = train_params)

-tl.layers.initialize_global_variables(sess)
+sess.run(tf.global_variables_initializer())

 network.print_params()
 network.print_layers()
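For context, a hedged usage sketch of the three common methods this docs page lists, repeating the toy TF 1.x setup from the earlier sketch (all names and shapes illustrative):

import tensorflow as tf
import tensorlayer as tl

sess = tf.InteractiveSession()
x = tf.placeholder(tf.float32, shape=[None, 784], name='x')
network = tl.layers.InputLayer(x, name='input')
network = tl.layers.DenseLayer(network, n_units=800, act=tf.nn.relu, name='relu1')

sess.run(tf.global_variables_initializer())  # must come before print_params()

network.print_params()          # variable info, in order
network.print_layers()          # layer info, in order
print(network.count_params())   # total number of parameters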

examples/basic_tutorials/tutorial_cifar10_placeholder.py

Lines changed: 1 addition & 1 deletion

@@ -131,7 +131,7 @@ def distort_fn(x, is_train=False):
 train_op = tf.train.AdamOptimizer(learning_rate, beta1=0.9, beta2=0.999, epsilon=1e-08,
                                   use_locking=False).minimize(cost, var_list=train_params)

-tl.layers.initialize_global_variables(sess)
+sess.run(tf.global_variables_initializer())

 network.print_params(False)
 network.print_layers()

examples/basic_tutorials/tutorial_cifar10_tfrecord.py

Lines changed: 1 addition & 1 deletion

@@ -277,7 +277,7 @@ def model_batch_norm(x_crop, y_, reuse, is_train):
 with tf.device('/gpu:0'):  # <-- remove it if you don't have GPU
     train_op = tf.train.AdamOptimizer(learning_rate).minimize(cost)

-tl.layers.initialize_global_variables(sess)
+sess.run(tf.global_variables_initializer())
 if resume:
     print("Load existing model " + "!" * 10)
     saver = tf.train.Saver()
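Note the ordering in this hunk: the initializer runs first, and when resume is set, the saver's restore then overwrites the freshly initialized values with the checkpointed ones. A hedged sketch of that pattern (the flag and checkpoint path are hypothetical; the tutorial's actual save/restore code sits outside this hunk):

import tensorflow as tf

resume = True                           # hypothetical flag
ckpt_path = 'model_cifar10.ckpt'        # hypothetical checkpoint path

w = tf.get_variable('w', shape=[10])    # stand-in variable so the sketch runs
sess = tf.Session()

sess.run(tf.global_variables_initializer())  # fresh values first
if resume:
    saver = tf.train.Saver()
    saver.restore(sess, ckpt_path)      # checkpoint values replace the fresh ones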

examples/basic_tutorials/tutorial_mlp_dropout1.py

Lines changed: 1 addition & 1 deletion

@@ -37,7 +37,7 @@
 train_op = tf.train.AdamOptimizer(learning_rate=0.0001).minimize(cost, var_list=train_params)

 # initialize all variables in the session
-tl.layers.initialize_global_variables(sess)
+sess.run(tf.global_variables_initializer())

 # print network information
 network.print_params()

examples/basic_tutorials/tutorial_mlp_dropout2.py

Lines changed: 1 addition & 1 deletion

@@ -46,7 +46,7 @@ def mlp(x, is_train=True, reuse=False):
 train_op = tf.train.AdamOptimizer(learning_rate=0.0001).minimize(cost, var_list=train_params)

 # initialize all variables in the session
-tl.layers.initialize_global_variables(sess)
+sess.run(tf.global_variables_initializer())

 n_epoch = 500
 batch_size = 500

examples/basic_tutorials/tutorial_mnist_autoencoder_cnn.py

Lines changed: 4 additions & 4 deletions

@@ -76,7 +76,7 @@ def main_test_layers(model='relu'):
 print_freq = 5
 train_op = tf.train.AdamOptimizer(learning_rate).minimize(cost)

-tl.layers.initialize_global_variables(sess)
+sess.run(tf.global_variables_initializer())

 net.print_params()
 net.print_layers()
@@ -179,7 +179,7 @@ def main_test_denoise_AE(model='relu'):
 recon_layer1 = tl.layers.ReconLayer(net, x_recon=x, n_units=784, act=tf.nn.sigmoid, name='recon_layer1')

 # ready to train
-tl.layers.initialize_global_variables(sess)
+sess.run(tf.global_variables_initializer())

 # print all params
 print("All net Params")
@@ -253,7 +253,7 @@ def main_test_stacked_denoise_AE(model='relu'):
 train_op = tf.train.AdamOptimizer(learning_rate).minimize(cost, var_list=train_params)

 # Initialize all variables including weights, biases and the variables in train_op
-tl.layers.initialize_global_variables(sess)
+sess.run(tf.global_variables_initializer())

 # Pre-train
 print("\nAll net Params before pre-train")
@@ -417,7 +417,7 @@ def main_test_cnn_layer():
 train_params = net.all_params
 train_op = tf.train.AdamOptimizer(learning_rate).minimize(cost, var_list=train_params)

-tl.layers.initialize_global_variables(sess)
+sess.run(tf.global_variables_initializer())
 net.print_params()
 net.print_layers()

examples/basic_tutorials/tutorial_mnist_float16.py

Lines changed: 1 addition & 1 deletion

@@ -64,7 +64,7 @@ def model(x, is_train=True, reuse=False):
     use_locking=False).minimize(cost, var_list=train_params)

 # initialize all variables in the session
-tl.layers.initialize_global_variables(sess)
+sess.run(tf.global_variables_initializer())

 # train the network
 n_epoch = 500

examples/basic_tutorials/tutorial_mnist_simple.py

Lines changed: 1 addition & 1 deletion

@@ -39,7 +39,7 @@
 train_op = tf.train.AdamOptimizer(learning_rate=0.0001).minimize(cost, var_list=train_params)

 # initialize all variables in the session
-tl.layers.initialize_global_variables(sess)
+sess.run(tf.global_variables_initializer())

 # print network information
 network.print_params()

examples/database/task_script.py

Lines changed: 1 addition & 1 deletion

@@ -53,7 +53,7 @@ def mlp(x, is_train=True, reuse=False):
 train_op = tf.train.AdamOptimizer(learning_rate=0.0001).minimize(cost, var_list=train_params)

 # initialize all variables in the session
-tl.layers.initialize_global_variables(sess)
+sess.run(tf.global_variables_initializer())

 # train the network
 tl.utils.fit(
