Skip to content

Commit 5f70629

Browse files
committed
Better function name
1 parent 9db107d commit 5f70629

File tree

1 file changed: +6 −6 lines changed

1 file changed: +6 −6 lines changed

python/paddle/fluid/learning_rate_decay.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@
3030
"""
3131

3232

33-
def float_global_step():
33+
def _decay_step_counter():
3434
# the first global step is zero in learning rate decay
3535
global_step = layers.autoincreased_step_counter(
3636
counter_name='@LR_DECAY_COUNTER@', begin=0, step=1)
@@ -55,7 +55,7 @@ def exponential_decay(learning_rate, decay_steps, decay_rate, staircase=False):
5555
Returns:
5656
The decayed learning rate
5757
"""
58-
global_step = float_global_step()
58+
global_step = _decay_step_counter()
5959

6060
with init_on_cpu():
6161
# update learning_rate
@@ -85,7 +85,7 @@ def natural_exp_decay(learning_rate, decay_steps, decay_rate, staircase=False):
8585
Returns:
8686
The decayed learning rate
8787
"""
88-
global_step = float_global_step()
88+
global_step = _decay_step_counter()
8989

9090
with init_on_cpu():
9191
div_res = global_step / decay_steps
@@ -114,7 +114,7 @@ def inverse_time_decay(learning_rate, decay_steps, decay_rate, staircase=False):
114114
Returns:
115115
The decayed learning rate
116116
"""
117-
global_step = float_global_step()
117+
global_step = _decay_step_counter()
118118

119119
with init_on_cpu():
120120
div_res = global_step / decay_steps
@@ -151,7 +151,7 @@ def polynomial_decay(learning_rate,
151151
Returns:
152152
The decayed learning rate
153153
"""
154-
global_step = float_global_step()
154+
global_step = _decay_step_counter()
155155

156156
with init_on_cpu():
157157
if cycle:
@@ -193,7 +193,7 @@ def piecewise_decay(boundaries, values):
193193
if len(values) - len(boundaries) != 1:
194194
raise ValueError("len(values) - len(boundaries) should be 1")
195195

196-
global_step = float_global_step()
196+
global_step = _decay_step_counter()
197197

198198
with init_on_cpu():
199199
lr = layers.create_global_var(

0 commit comments

Comments (0)