@@ -38,13 +38,13 @@ class Optimizer(object):
     def __init__(self, learning_rate, global_step=None, regularization=None):
         if not isinstance(learning_rate, float) and \
                 not isinstance(learning_rate, framework.Variable):
-            raise ValueError("learning rate should be float or Variable")
+            raise TypeError("learning rate should be float or Variable")
         self._global_step = global_step
         self.regularization = regularization
         self._learning_rate = learning_rate
         # each program should have a independent learning rate
         # program -> Variable(learning_rate)
-        self._learning_rate_map = defaultdict(lambda: None)
+        self._learning_rate_map = dict()
         if isinstance(self._learning_rate, framework.Variable):
             self._learning_rate_map[framework.default_main_program(
             )] = self._learning_rate
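The `defaultdict(lambda: None)` → `dict()` switch is more than cosmetic: a `defaultdict` inserts its default on every missing-key lookup, so merely *reading* the map would silently register a `None` learning rate for that program. A minimal standalone sketch of the difference (the string key here is a hypothetical stand-in for a Paddle `Program` object):

```python
from collections import defaultdict

# With defaultdict, a plain read mutates the map:
lr_map = defaultdict(lambda: None)
lr_map["prog_a"]                   # lookup of a missing key...
print("prog_a" in lr_map)          # True -- a None entry was silently inserted

# With dict + .get(), reads are side-effect free:
lr_map = dict()
print(lr_map.get("prog_a", None))  # None
print("prog_a" in lr_map)          # False -- the map is unchanged
```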
@@ -62,7 +62,7 @@ def _create_global_learning_rate(self):
             return
         else:
             if not isinstance(self._learning_rate, float):
-                raise ValueError(
+                raise TypeError(
                     "learning rate variable is create outside optimizer,"
                     "can not create new learning rate variable for new program")
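Switching from `ValueError` to `TypeError` here (and in `__init__` above) follows the usual Python convention: an argument of the wrong *type* is a `TypeError`, while `ValueError` is reserved for a correctly typed argument with an unacceptable value. A hedged caller-side sketch (the stripped-down class below only mirrors the patched check, not Paddle's full API):

```python
class Optimizer(object):
    # Simplified: the real check also accepts framework.Variable.
    def __init__(self, learning_rate):
        if not isinstance(learning_rate, float):
            raise TypeError("learning rate should be float or Variable")
        self._learning_rate = learning_rate

try:
    Optimizer("0.01")  # wrong type: str instead of float
except TypeError as exc:
    print(exc)         # -> learning rate should be float or Variable
```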
@@ -82,7 +82,7 @@ def global_learning_rate(self, program=None):
         """
         if program is None:
             program = framework.default_main_program()
-        return self._learning_rate_map[program]
+        return self._learning_rate_map.get(program, None)

     def _append_optimize_op(self, block, param_and_grad):
         """ append optimize operator to block and return all the added optimize_op