@@ -722,9 +722,10 @@ def test_update_dense_parameter(self):
 # test fetch all the variables of global_block
 
 import paddle.dataset.flowers as flowers
+import math
 
 
-def lenet(data, class_dim):
+def Lenet(data, class_dim):
     conv1 = fluid.layers.conv2d(data, 32, 5, 1, act=None)
     bn1 = fluid.layers.batch_norm(conv1, act='relu')
     pool1 = fluid.layers.pool2d(bn1, 2, 'max', 2)
@@ -774,25 +775,25 @@ def parallel_exe(self, train_inputs, seed):
         fetch_list = []
         all_vars = main.global_block().vars
         for k, v in all_vars.iteritems():
-            if 'velocity' not in k:
+            if 'tmp' not in k and k[0] is not '_' or v.persistable:
                 fetch_list.append(k)
 
         for data in train_inputs:
             ret = pe.run(fetch_list, feed=feeder.feed(data))
             for i in range(len(fetch_list)):
-                print("%s - %s" % (fetch_list[i], np.sum(ret[i])))
+                assert not math.isnan(np.sum(ret[i])) and \
+                    not math.isinf(np.sum(ret[i]))
 
     def test_update_sparse_parameter(self):
         tst_reader = paddle.batch(flowers.test(use_xmap=False), batch_size=16)
         tst_reader_iter = tst_reader()
 
-        seed = 100
-        iters = 4
+        iters = 3
         train_inputs = []
         for i in range(iters):
             train_inputs.append(tst_reader_iter.next())
 
-        self.parallel_exe(train_inputs, seed)
+        self.parallel_exe(train_inputs, seed=1)
 
 
 if __name__ == '__main__':
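Note on the change above: instead of printing the sum of each fetched variable, the test now asserts that every sum is finite. Below is a minimal, standalone sketch of that NaN/Inf guard; the fetched_arrays data is a hypothetical stand-in and is not output from ParallelExecutor's pe.run().

# Standalone sketch (assumption: plain numpy arrays stand in for the results
# returned by pe.run); it only illustrates the math.isnan/math.isinf check
# added in the diff.
import math

import numpy as np

fetched_arrays = [np.ones((2, 3), dtype='float32'),
                  np.arange(6, dtype='float32')]
for i, arr in enumerate(fetched_arrays):
    total = np.sum(arr)
    # np.sum collapses the whole array to one scalar; a NaN anywhere makes the
    # sum NaN, and an Inf makes it Inf (or NaN), so the two checks catch both.
    assert not math.isnan(total) and not math.isinf(total), \
        "fetched result %d is not finite (sum=%s)" % (i, total)
print("all fetched results are finite")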