
Commit c8da3f4

Merge pull request #433 from luotao1/bug
Fix previous CI error.
2 parents ebb153b + aa560db


2 files changed: +13, -7 lines


python/paddle/trainer_config_helpers/layers.py

Lines changed: 12 additions & 6 deletions
@@ -4048,7 +4048,8 @@ def cross_entropy(input, label, name=None, coeff=1.0, layer_attr=None):

    .. code-block:: python

-       cost = cross_entropy(input, label)
+       cost = cross_entropy(input=input_layer,
+                            label=label_layer)

    :param input: The first input layer.
    :type input: LayerOutput.
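For context, the docstring examples in this file (here and in the hunks below for cross_entropy_with_selfnorm, huber_cost, and multi_binary_label_cross_entropy) now use the keyword-argument style. A minimal, self-contained config sketch of that style follows; the layer names and sizes are illustrative assumptions, not part of the patch:

    # Illustrative trainer config using the keyword-argument style from the
    # updated docstring examples (names and sizes are assumptions).
    from paddle.trainer_config_helpers import *

    settings(learning_rate=1e-4, batch_size=1000)

    input_layer = fc_layer(input=data_layer(name='features', size=100),
                           size=10,
                           act=SoftmaxActivation())
    label_layer = data_layer(name='label', size=10)

    cost = cross_entropy(input=input_layer, label=label_layer)
    outputs(cost)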
@@ -4084,7 +4085,8 @@ def cross_entropy_with_selfnorm(input, label, name=None, coeff=1.0,

    .. code-block:: python

-       cost = cross_entropy_with_selfnorm(input, label)
+       cost = cross_entropy_with_selfnorm(input=input_layer,
+                                          label=label_layer)

    :param input: The first input layer.
    :type input: LayerOutput.
@@ -4122,7 +4124,7 @@ def sum_cost(input, name=None, layer_attr=None):

    .. code-block:: python

-       cost = sum_cost(input)
+       cost = sum_cost(input=input_layer)

    :param input: The first input layer.
    :type input: LayerOutput.
@@ -4133,6 +4135,7 @@ def sum_cost(input, name=None, layer_attr=None):
    :return: LayerOutput object.
    :rtype: LayerOutput.
    """
+   assert isinstance(input, LayerOutput)
    Layer(name=name,
          type=LayerType.SUM_COST,
          inputs=[input.name],
@@ -4141,7 +4144,8 @@ def sum_cost(input, name=None, layer_attr=None):

    return LayerOutput(name,
                       LayerType.SUM_COST,
-                      parents=[input])
+                      parents=[input],
+                      size=1)


@wrap_name_default()
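A note on the two sum_cost changes above: the new assert rejects anything that is not a LayerOutput before config generation, and size=1 records that the cost is a scalar on the returned LayerOutput. A rough sketch of the intended behaviour, with assumed layer names:

    # Rough sketch (assumed names, not from the patch): sum_cost now insists
    # on a LayerOutput argument and reports a scalar output size.
    from paddle.trainer_config_helpers import *

    settings(learning_rate=1e-4, batch_size=1000)

    hidden = fc_layer(input=data_layer(name='input', size=200), size=100)

    cost = sum_cost(input=hidden)   # OK: `hidden` is a LayerOutput
    assert cost.size == 1           # the returned LayerOutput is a scalar cost
    # sum_cost(input='hidden')      # would now fail the isinstance assert

    outputs(cost)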
@@ -4152,7 +4156,8 @@ def huber_cost(input, label, name=None, coeff=1.0, layer_attr=None):

    .. code-block:: python

-       cost = huber_cost(input, label)
+       cost = huber_cost(input=input_layer,
+                         label=label_layer)

    :param input: The first input layer.
    :type input: LayerOutput.
@@ -4188,7 +4193,8 @@ def multi_binary_label_cross_entropy(input, label, name=None, coeff=1.0,

    .. code-block:: python

-       cost = multi_binary_label_cross_entropy(input, label)
+       cost = multi_binary_label_cross_entropy(input=input_layer,
+                                               label=label_layer)

    :param input: The first input layer.
    :type input: LayerOutput
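Unlike plain cross_entropy, the multi-label cost pairs the prediction with a label layer of the same width, one independent 0/1 target per unit; this is how the test below wires probs and xe_label. A hedged sketch with assumed names and sizes:

    # Illustrative only (names, sizes, and activation assumed): per-unit
    # binary targets, so the label layer has the same size as the prediction.
    from paddle.trainer_config_helpers import *

    settings(learning_rate=1e-4, batch_size=1000)

    probs = fc_layer(input=data_layer(name='features', size=100),
                     size=10,
                     act=SigmoidActivation())
    xe_label = data_layer(name='xe-label', size=10)

    outputs(multi_binary_label_cross_entropy(input=probs, label=xe_label))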

python/paddle/trainer_config_helpers/tests/configs/test_cost_layers.py

Lines changed: 1 addition & 1 deletion
@@ -25,4 +25,4 @@
    huber_cost(input=data_layer(name='huber_probs', size=1),
               label=data_layer(name='huber_label', size=1)),
    multi_binary_label_cross_entropy(input=probs, label=xe_label),
-   sum_cost(hidden))
+   sum_cost(input=hidden))
