Skip to content

Commit d130d18

Browse files
authored
Complete unittest for trainer_config_helpers. (#108)
* Fix lots of trainer_config_helpers bug, and complete unittest for `layers.py`
1 parent 1c56e0d commit d130d18

28 files changed

+844
-248
lines changed

python/paddle/trainer/config_parser.py

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1279,7 +1279,7 @@ def create_input_parameter(
12791279
size,
12801280
dims=None,
12811281
sparse = None,
1282-
format = "csr"):
1282+
format = None):
12831283
if dims is None:
12841284
# TODO(yuyang18): print warning and callstack here!
12851285
dims = list()
@@ -2074,7 +2074,7 @@ def __init__(
20742074
active_type='linear',
20752075
device=None,
20762076
bias=False,
2077-
output_max_index=False):
2077+
output_max_index=None):
20782078
super(MaxLayer, self).__init__(name, 'max', 0, inputs=inputs, device=device)
20792079
config_assert(len(self.inputs) == 1, 'MaxLayer must have 1 input')
20802080
self.config.trans_type = trans_type
@@ -2083,7 +2083,8 @@ def __init__(
20832083
input_layer = self.get_input_layer(input_index)
20842084
self.set_layer_size(input_layer.size)
20852085
self.create_bias_parameter(bias, self.config.size)
2086-
self.config.output_max_index=output_max_index
2086+
if output_max_index is not None:
2087+
self.config.output_max_index = output_max_index
20872088

20882089

20892090
@config_layer('maxid')
@@ -2440,7 +2441,7 @@ def __init__(
24402441
inputs,
24412442
size=0,
24422443
bias=True,
2443-
error_clipping_threshold=0.0,
2444+
error_clipping_threshold=None,
24442445
**xargs):
24452446
config_assert(inputs, 'inputs cannot be empty')
24462447
super(MixedLayer, self).__init__(
@@ -2510,7 +2511,8 @@ def __init__(
25102511

25112512
self.create_bias_parameter(bias, self.config.size)
25122513

2513-
self.config.error_clipping_threshold = error_clipping_threshold
2514+
if error_clipping_threshold is not None:
2515+
self.config.error_clipping_threshold = error_clipping_threshold
25142516

25152517
# like MixedLayer, but no bias parameter
25162518
@config_func

python/paddle/trainer_config_helpers/activations.py

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,8 +15,10 @@
1515
__all__ = ["TanhActivation", "SigmoidActivation",
1616
"SoftmaxActivation", "IdentityActivation", "LinearActivation",
1717
'SequenceSoftmaxActivation', 'ExpActivation',
18-
"ReluActivation", "BReluActivation", "SoftReluActivation", "STanhActivation",
19-
"AbsActivation", "SquareActivation", "BaseActivation"]
18+
"ReluActivation", "BReluActivation", "SoftReluActivation",
19+
"STanhActivation",
20+
"AbsActivation", "SquareActivation",
21+
"BaseActivation"]
2022

2123

2224
class BaseActivation(object):
@@ -36,6 +38,9 @@ def __init__(self, name, support_hppl):
3638
self.name = name
3739
self.support_hppl = support_hppl
3840

41+
def __repr__(self):
42+
return self.name
43+
3944

4045
class TanhActivation(BaseActivation):
4146
"""

0 commit comments

Comments (0)