@@ -4048,7 +4048,8 @@ def cross_entropy(input, label, name=None, coeff=1.0, layer_attr=None):

    .. code-block:: python

-       cost = cross_entropy(input, label)
+       cost = cross_entropy(input=input_layer,
+                            label=label_layer)

    :param input: The first input layer.
    :type input: LayerOutput.
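The updated examples assume two layers named input_layer and label_layer already exist in the config. A minimal sketch of how they might be built with existing trainer_config_helpers functions (data_layer, fc_layer, SoftmaxActivation); the names and sizes here are hypothetical, and only the keyword-argument call style comes from the docstring change above:

    from paddle.trainer_config_helpers import *

    # Hypothetical feature/label layers; sizes are made up for illustration.
    input_layer = fc_layer(input=data_layer(name='features', size=100),
                           size=10,
                           act=SoftmaxActivation())
    label_layer = data_layer(name='label', size=10)
    cost = cross_entropy(input=input_layer, label=label_layer)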
@@ -4084,7 +4085,8 @@ def cross_entropy_with_selfnorm(input, label, name=None, coeff=1.0,

    .. code-block:: python

-       cost = cross_entropy_with_selfnorm(input, label)
+       cost = cross_entropy_with_selfnorm(input=input_layer,
+                                          label=label_layer)

    :param input: The first input layer.
    :type input: LayerOutput.
@@ -4122,7 +4124,7 @@ def sum_cost(input, name=None, layer_attr=None):

    .. code-block:: python

-       cost = sum_cost(input)
+       cost = sum_cost(input=input_layer)

    :param input: The first input layer.
    :type input: LayerOutput.
@@ -4133,6 +4135,7 @@ def sum_cost(input, name=None, layer_attr=None):
    :return: LayerOutput object.
    :rtype: LayerOutput.
    """
+   assert isinstance(input, LayerOutput)
    Layer(name=name,
          type=LayerType.SUM_COST,
          inputs=[input.name],
@@ -4141,7 +4144,8 @@ def sum_cost(input, name=None, layer_attr=None):

    return LayerOutput(name,
                       LayerType.SUM_COST,
-                      parents=[input])
+                      parents=[input],
+                      size=1)


@wrap_name_default()
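Beyond the docstring fix, the two sum_cost hunks also change behavior slightly: the argument is now validated up front, and the returned LayerOutput declares size=1, matching the single scalar value a cost layer produces. A sketch of the visible effect (hypothetical usage, assuming input_layer is a LayerOutput):

    cost = sum_cost(input=input_layer)   # passes the new isinstance assert
    # sum_cost(input='input_layer')      # a raw name would now raise AssertionError
    assert cost.size == 1                # the output size is now reported explicitly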
@@ -4152,7 +4156,8 @@ def huber_cost(input, label, name=None, coeff=1.0, layer_attr=None):

    .. code-block:: python

-       cost = huber_cost(input, label)
+       cost = huber_cost(input=input_layer,
+                         label=label_layer)

    :param input: The first input layer.
    :type input: LayerOutput.
@@ -4188,7 +4193,8 @@ def multi_binary_label_cross_entropy(input, label, name=None, coeff=1.0,

    .. code-block:: python

-       cost = multi_binary_label_cross_entropy(input, label)
+       cost = multi_binary_label_cross_entropy(input=input_layer,
+                                               label=label_layer)

    :param input: The first input layer.
    :type input: LayerOutput