Skip to content

Commit 880774d

Browse files
author
Haonan
committed
Change the act.name for LinearActivation() to "linear" so that it won't
fail in hl_activetype; also fix HasInputsSet in submodel.
1 parent 8d4c453 commit 880774d

File tree

3 files changed

+10
-10
lines changed

3 files changed

+10
-10
lines changed

python/paddle/trainer/config_parser.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -218,7 +218,7 @@ def Inputs(*args):
218218

219219
@config_func
220220
def HasInputsSet():
221-
return len(g_config.model_config.input_layer_names) != 0
221+
return len(g_current_submodel.input_layer_names) != 0
222222

223223

224224
# Define the name of the output layers of the NeuralNetwork.
@@ -1120,22 +1120,22 @@ def parse_block_expand(block_expand, input_layer_name, block_expand_conf):
11201120
block_expand_conf.output_x = 0
11211121
else:
11221122
block_expand_conf.output_x = cnn_output_size(
1123-
block_expand.img_size_x, block_expand.block_x,
1123+
block_expand.img_size_x, block_expand.block_x,
11241124
block_expand.padding_x, block_expand.stride_x, False)
11251125

11261126
if block_expand_conf.img_size_y == 0:
11271127
block_expand_conf.output_y = 0
11281128
else:
11291129
block_expand_conf.output_y = cnn_output_size(
1130-
block_expand.img_size_y, block_expand.block_y,
1130+
block_expand.img_size_y, block_expand.block_y,
11311131
block_expand.padding_y, block_expand.stride_y, False)
11321132

11331133
def parse_maxout(maxout, input_layer_name, maxout_conf):
11341134
maxout_conf.channels = maxout.channels
11351135
maxout_conf.groups = maxout.groups
11361136
maxout_conf.img_size_x = maxout.img_size_x
11371137
maxout_conf.img_size_y = maxout.img_size_y
1138-
1138+
11391139
# Define an evaluator
11401140
@config_func
11411141
def Evaluator(
@@ -1773,7 +1773,7 @@ def __init__(
17731773
self.config.inputs[0].maxout_conf)
17741774
maxout_conf = self.config.inputs[0].maxout_conf
17751775
self.set_layer_size(g_layer_map[input_layer.name].size / maxout_conf.groups)
1776-
1776+
17771777
# key: cost type
17781778
# value: cost class
17791779
g_cost_map = {}

python/paddle/trainer_config_helpers/activations.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -23,9 +23,9 @@
2323

2424
class BaseActivation(object):
2525
"""
26-
A mark for activation class.
26+
A mark for activation class.
2727
Each activation inherit BaseActivation, which has two parameters.
28-
28+
2929
:param name: activation name in paddle config.
3030
:type name: basestring
3131
:param support_hppl: True if supported by hppl. HPPL is a library used by paddle
@@ -104,7 +104,7 @@ class IdentityActivation(BaseActivation):
104104
Just do nothing for output both forward/backward.
105105
"""
106106

107-
def __init__(self): BaseActivation.__init__(self, '', False)
107+
def __init__(self): BaseActivation.__init__(self, 'linear', False)
108108

109109

110110
LinearActivation = IdentityActivation
@@ -194,7 +194,7 @@ def __init__(self): BaseActivation.__init__(self, 'square', False)
194194
class ExpActivation(BaseActivation):
195195
"""
196196
Exponential Activation.
197-
197+
198198
.. math::
199199
f(z) = e^z.
200200
"""

python/paddle/trainer_config_helpers/layers.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1657,7 +1657,7 @@ def img_pool_layer(input, pool_size, name=None,
16571657
:type pool_size_y: int|None
16581658
:param num_channels: number of input channel.
16591659
:type num_channels: int
1660-
:param pool_type: pooling type. MaxPooling or AveragePooling. Default is
1660+
:param pool_type: pooling type. MaxPooling or AvgPooling. Default is
16611661
MaxPooling.
16621662
:type pool_type: BasePoolingType
16631663
:param stride: stride width of pooling.

0 commit comments

Comments (0)