Commit ebb153b

Author: Haonan
Merge pull request #416 from yu239/hl_activetype

    change the act.name for LinearActivation() to "linear" so that it won't
    fail in hl_activetype; also fix the hasinputsset in submodel

2 parents: eb3bf9e + 45f6e1a

4 files changed: +10 −10 lines

paddle/utils/Util.cpp

Lines changed: 1 addition & 1 deletion
@@ -378,7 +378,7 @@ hl_activation_mode_t hlActiveType(const std::string& type) {
     return HL_ACTIVATION_RELU;
   } else if (type == "tanh") {
     return HL_ACTIVATION_TANH;
-  } else if (type == "linear") {
+  } else if (type == "linear" || type == "") {
     return HL_ACTIVATION_LINEAR;
   } else {
     LOG(FATAL) << "Do not support activation type " << type;
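
The effect of the one-line change above: an activation whose configured name is the empty string now resolves to HL_ACTIVATION_LINEAR instead of falling through to the fatal branch. A minimal Python sketch of the dispatch (the real function returns hl_activation_mode_t enum values, not strings, and any table entry besides relu/tanh/linear is an assumption here):

    def hl_active_type(type_name):
        # Mirrors the if/else chain in hlActiveType().
        table = {
            "relu":   "HL_ACTIVATION_RELU",
            "tanh":   "HL_ACTIVATION_TANH",
            "linear": "HL_ACTIVATION_LINEAR",
            "":       "HL_ACTIVATION_LINEAR",  # the case this commit adds
        }
        if type_name not in table:
            # The C++ side calls LOG(FATAL), which aborts the process.
            raise ValueError("Do not support activation type " + type_name)
        return table[type_name]

    assert hl_active_type("") == "HL_ACTIVATION_LINEAR"  # no longer fatal
    assert hl_active_type("linear") == "HL_ACTIVATION_LINEAR"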

python/paddle/trainer/config_parser.py

Lines changed: 5 additions & 5 deletions
(Several of the paired −/+ lines below are identical except for trailing whitespace that this commit removes.)

@@ -218,7 +218,7 @@ def Inputs(*args):
 
 @config_func
 def HasInputsSet():
-    return len(g_config.model_config.input_layer_names) != 0
+    return len(g_current_submodel.input_layer_names) != 0
 
 
 # Define the name of the output layers of the NeuralNetwork.

@@ -1170,22 +1170,22 @@ def parse_block_expand(block_expand, input_layer_name, block_expand_conf):
         block_expand_conf.output_x = 0
     else:
         block_expand_conf.output_x = cnn_output_size(
-            block_expand.img_size_x, block_expand.block_x,
+            block_expand.img_size_x, block_expand.block_x,
             block_expand.padding_x, block_expand.stride_x, False)
 
     if block_expand_conf.img_size_y == 0:
         block_expand_conf.output_y = 0
     else:
         block_expand_conf.output_y = cnn_output_size(
-            block_expand.img_size_y, block_expand.block_y,
+            block_expand.img_size_y, block_expand.block_y,
             block_expand.padding_y, block_expand.stride_y, False)
 
 def parse_maxout(maxout, input_layer_name, maxout_conf):
     maxout_conf.channels = maxout.channels
     maxout_conf.groups = maxout.groups
     maxout_conf.img_size_x = maxout.img_size_x
     maxout_conf.img_size_y = maxout.img_size_y
-
+
 # Define an evaluator
 @config_func
 def Evaluator(

@@ -1881,7 +1881,7 @@ def __init__(
             self.config.inputs[0].maxout_conf)
         maxout_conf = self.config.inputs[0].maxout_conf
         self.set_layer_size(g_layer_map[input_layer.name].size / maxout_conf.groups)
-
+
 # key: cost type
 # value: cost class
 g_cost_map = {}
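
The HasInputsSet() change is the "hasinputsset in submodel" half of the commit message: the old check consulted the global model config, so inside a submodel it could report inputs as already set when only the outer model had them. A hedged sketch of the difference, using simplified stand-ins for the config_parser globals g_config.model_config and g_current_submodel:

    # Simplified stand-ins; the real objects are protobuf messages built
    # up while the config is parsed.
    class Model(object):
        def __init__(self):
            self.input_layer_names = []

    g_config_model = Model()      # stand-in for g_config.model_config
    g_current_submodel = Model()  # submodel currently being defined

    g_config_model.input_layer_names.append("data")  # outer model input

    # Old check: looks at the outer model, wrongly True in the submodel.
    old_has_inputs_set = len(g_config_model.input_layer_names) != 0
    # New check: looks at the submodel actually being built.
    new_has_inputs_set = len(g_current_submodel.input_layer_names) != 0

    assert old_has_inputs_set is True
    assert new_has_inputs_set is False  # submodel has no inputs yet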

python/paddle/trainer_config_helpers/activations.py

Lines changed: 3 additions & 3 deletions
@@ -23,9 +23,9 @@
 
 class BaseActivation(object):
     """
-    A mark for activation class.
+    A mark for activation class.
     Each activation inherit BaseActivation, which has two parameters.
-
+
     :param name: activation name in paddle config.
     :type name: basestring
     :param support_hppl: True if supported by hppl. HPPL is a library used by paddle

@@ -194,7 +194,7 @@ def __init__(self): BaseActivation.__init__(self, 'square', False)
 class ExpActivation(BaseActivation):
     """
     Exponential Activation.
-
+
     .. math::
        f(z) = e^z.
     """

python/paddle/trainer_config_helpers/layers.py

Lines changed: 1 addition & 1 deletion
@@ -1766,7 +1766,7 @@ def img_pool_layer(input, pool_size, name=None,
     :type pool_size_y: int|None
     :param num_channels: number of input channel.
     :type num_channels: int
-    :param pool_type: pooling type. MaxPooling or AveragePooling. Default is
+    :param pool_type: pooling type. MaxPooling or AvgPooling. Default is
                       MaxPooling.
     :type pool_type: BasePoolingType
     :param stride: stride width of pooling.
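
The docstring fix matters because AvgPooling, not AveragePooling, is the pooling class the helpers actually export. A hedged usage sketch with the corrected name (layer names and sizes are arbitrary; this assumes the usual trainer_config_helpers star import):

    from paddle.trainer_config_helpers import *

    data = data_layer(name="image", size=32 * 32 * 3)
    conv = img_conv_layer(input=data, filter_size=3, num_filters=16,
                          num_channels=3, stride=1, padding=1)
    pool = img_pool_layer(input=conv,
                          pool_size=3,
                          stride=2,
                          pool_type=AvgPooling())  # AvgPooling, not AveragePooling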
