Commit 9601c2f

Merge branch 'develop' of github.com:baidu/Paddle into feature/add_sum_cost_in_args
2 parents: e3d4da2 + 6951c8a

3 files changed (+26, -18 lines)

python/paddle/trainer_config_helpers/__init__.py

Lines changed: 1 addition & 3 deletions

@@ -20,6 +20,4 @@
 from networks import *
 from optimizers import *
 from attrs import *
-
-# This will enable operator overload for LayerOutput
-import math as layer_math
+import layer_math
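
The deleted comment notes that this import exists to enable operator overloading for LayerOutput; the merge replaces the stdlib alias (import math as layer_math) with a dedicated layer_math module, whose contents are not shown in this diff. A hypothetical illustration of what such overloading permits, assuming the module registers arithmetic operators on LayerOutput:

from paddle.trainer_config_helpers import *  # pulls in layer_math's overloads

a = data_layer(name="a", size=10)
b = data_layer(name="b", size=10)
c = a + b  # hypothetical: combine layers via the assumed LayerOutput overloads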

python/paddle/trainer_config_helpers/attrs.py

Lines changed: 25 additions & 15 deletions
(In the first hunk the removed and re-added lines are textually identical; they appear to differ only in whitespace, which this plain-text capture does not preserve.)

@@ -19,34 +19,34 @@
 
 
 def convert_and_compare(x, Type):
-    """
-    Convert x to be the same type as Type and then convert back to
-    check whether there is a loss of information
-    :param x: object to be checked
-    :param Type: target type to check x over
-
+    """
+    Convert x to be the same type as Type and then convert back to
+    check whether there is a loss of information
+    :param x: object to be checked
+    :param Type: target type to check x over
+
     """
     return type(x)(Type(x)) == x
 
 
 def is_compatible_with(x, Type):
-    """
-    Check if x has a type compatible with Type
-    :param x: object to be checked
-    :param Type: target type to check x over
-
+    """
+    Check if x has a type compatible with Type
+    :param x: object to be checked
+    :param Type: target type to check x over
+
     """
     if type(x) == Type:
         return True
     try:
         if float == Type or int == Type:
-            # avoid those types that can be converted to float/int but not very
-            # meaningful and could potentially lead to error
-            # i.e., str and bool typed value should not be used for initializing float/int variable
+            # avoid those types that can be converted to float/int but not very
+            # meaningful and could potentially lead to error
+            # i.e., str and bool typed value should not be used for initializing float/int variable
             if not isinstance(x, str) and not isinstance(x, bool):
                 return convert_and_compare(x, Type)
         elif bool == Type:
-            # should not use string type to initialize bool variable
+            # should not use string type to initialize bool variable
             if not isinstance(x, str):
                 return convert_and_compare(x, Type)
         else:
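
For context, the two helpers above implement a round-trip type check: a value is compatible with a target type if converting it to that type and back loses no information, with str and bool explicitly rejected for numeric targets. A minimal self-contained sketch of that logic follows; the hunk truncates at the final else branch, so the fall-through and exception handling below are assumptions:

def convert_and_compare(x, Type):
    # Round-trip x through Type; equality means no information was lost.
    return type(x)(Type(x)) == x

def is_compatible_with(x, Type):
    if type(x) == Type:
        return True
    try:
        if float == Type or int == Type:
            # str and bool can be converted to float/int, but almost never
            # meaningfully, so reject them outright.
            if not isinstance(x, str) and not isinstance(x, bool):
                return convert_and_compare(x, Type)
        elif bool == Type:
            # any non-empty string is truthy; never init a bool from a str
            if not isinstance(x, str):
                return convert_and_compare(x, Type)
        return False  # assumed: any other type is incompatible
    except Exception:  # assumed: a failed conversion means incompatible
        return False

print(is_compatible_with(10, float))  # True:  float(10) round-trips to 10
print(is_compatible_with(1.5, int))   # False: int(1.5) drops the fraction
print(is_compatible_with("3", int))   # False: str rejected for int targets
print(is_compatible_with(0, bool))    # True:  bool(0) round-trips to 0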
@@ -88,6 +88,10 @@ class ParameterAttribute(object):
     :type learning_rate: float or None
     :param momentum: The parameter momentum. None means use global value.
     :type momentum: float or None
+    :param gradient_clipping_threshold: gradient clipping threshold. If gradient
+                                        value larger than some value, will be
+                                        clipped.
+    :type gradient_clipping_threshold: float
     :param sparse_update: Enable sparse update for this parameter. It will
                           enable both local and remote sparse update.
     :type sparse_update: bool

@@ -104,6 +108,7 @@ def __init__(self,
                  l2_rate=None,
                  learning_rate=None,
                  momentum=None,
+                 gradient_clipping_threshold=None,
                  sparse_update=False):
         # initialize strategy.
         if is_static:

@@ -152,6 +157,11 @@ def __init__(self,
             self.attr['sparse_update'] = True
             self.attr['sparse_remote_update'] = True
 
+        if gradient_clipping_threshold is not None and \
+                is_compatible_with(gradient_clipping_threshold, float):
+            self.attr['gradient_clipping_threshold'] = \
+                gradient_clipping_threshold
+
     def set_default_parameter_name(self, name):
         """
         Set default parameter name. If parameter not set, then will use default
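
Taken together, these hunks expose gradient clipping as a per-parameter setting: a gradient_clipping_threshold passed to ParameterAttribute is stored in the attribute dict only when it passes the is_compatible_with(..., float) check, so incompatible values such as strings are silently dropped. A hedged usage sketch, with layer names and sizes that are illustrative rather than taken from this commit:

from paddle.trainer_config_helpers import *

# Clip this weight matrix's gradient values at the given threshold.
fc = fc_layer(
    input=data_layer(name="input", size=128),
    size=64,
    param_attr=ParameterAttribute(
        learning_rate=0.1,
        momentum=0.9,
        gradient_clipping_threshold=10.0))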
