Commit ea9e62b

optimize code
1 parent a636aa5 commit ea9e62b


python/paddle/fluid/optimizer.py

Lines changed: 4 additions & 4 deletions
@@ -38,13 +38,13 @@ class Optimizer(object):
     def __init__(self, learning_rate, global_step=None, regularization=None):
         if not isinstance(learning_rate, float) and \
                 not isinstance(learning_rate, framework.Variable):
-            raise ValueError("learning rate should be float or Variable")
+            raise TypeError("learning rate should be float or Variable")
         self._global_step = global_step
         self.regularization = regularization
         self._learning_rate = learning_rate
         # each program should have a independent learning rate
         # program -> Variable(learning_rate)
-        self._learning_rate_map = defaultdict(lambda: None)
+        self._learning_rate_map = dict()
         if isinstance(self._learning_rate, framework.Variable):
             self._learning_rate_map[framework.default_main_program(
             )] = self._learning_rate
@@ -62,7 +62,7 @@ def _create_global_learning_rate(self):
             return
         else:
             if not isinstance(self._learning_rate, float):
-                raise ValueError(
+                raise TypeError(
                     "learning rate variable is create outside optimizer,"
                     "can not create new learning rate variable for new program")
 
@@ -82,7 +82,7 @@ def global_learning_rate(self, program=None):
         """
         if program is None:
             program = framework.default_main_program()
-        return self._learning_rate_map[program]
+        return self._learning_rate_map.get(program, None)
 
     def _append_optimize_op(self, block, param_and_grad):
         """ append optimize operator to block and return all the added optimize_op
