Skip to content

Commit 9be39bb

Browse files
Enhance optimizer. (#13004)
1 parent 7ad39c4 commit 9be39bb

File tree

1 file changed

+7
-1
lines changed

1 file changed

+7
-1
lines changed

python/paddle/fluid/optimizer.py

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -46,10 +46,12 @@ class Optimizer(object):
4646
def __init__(self,
4747
learning_rate,
4848
regularization=None,
49-
LARS_weight_decay=0.0):
49+
LARS_weight_decay=0.0,
50+
name=None):
5051
if not isinstance(learning_rate, float) and \
5152
not isinstance(learning_rate, framework.Variable):
5253
raise TypeError("learning rate should be float or Variable")
54+
self._name = name
5355
self.regularization = regularization
5456
self._learning_rate = learning_rate
5557
# the learning rate type should be inferenced from loss
@@ -153,6 +155,8 @@ def _add_accumulator(self,
153155
dtype: data type of the accumulator variable
154156
fill_value: value to initialize the accumulator variable
155157
"""
158+
if self._name is not None:
159+
name = self._name + "_" + name
156160
if (name in self._accumulators and
157161
param.name in self._accumulators[name]):
158162
raise Exception("Accumulator {} already exists for parameter {}".
@@ -181,6 +185,8 @@ def _get_accumulator(self, name, param):
181185
Returns:
182186
accumulator variable for the parameter
183187
"""
188+
if self._name is not None:
189+
name = self._name + "_" + name
184190
if (name not in self._accumulators or
185191
param.name not in self._accumulators[name]):
186192
raise Exception("Accumulator {} does not exist for parameter {}".

0 commit comments

Comments
 (0)