@@ -1258,6 +1258,45 @@ def sequence_softmax(input, param_attr=None, bias_attr=None, use_cudnn=True):
def softmax(input, param_attr=None, bias_attr=None, use_cudnn=True, name=None):
+    """
+    The input of the softmax layer is a 2-D tensor with shape N x K (N is the
+    batch_size, K is the dimension of the input feature). The output tensor
+    has the same shape as the input tensor.
+
+    For each row of the input tensor, the softmax operator squashes the
+    K-dimensional vector of arbitrary real values to a K-dimensional vector of
+    real values in the range [0, 1] that add up to 1.
+
+    It computes the exponential of each element of the K-dimensional input
+    vector, and the sum of these exponential values over the whole vector.
+    The output of the softmax operator is the ratio of each element's
+    exponential to that sum.
+
+    For each row :math:`i` and each column :math:`j` in Input(X), we have:
+
+    .. math::
+
+        Out[i, j] = \\frac{\\exp(X[i, j])}{\\sum_j \\exp(X[i, j])}
+
+    Args:
+        input (Variable): The input variable.
+        param_attr (ParamAttr): attributes for the parameter.
+        bias_attr (ParamAttr): attributes for the bias.
+        use_cudnn (bool): Use the cudnn kernel or not; it is valid only when
+            the cudnn library is installed.
+        name (str|None): A name for this layer (optional). If set to None,
+            the layer will be named automatically.
+
+    Returns:
+        Variable: output of softmax
+
+    Examples:
+
+        .. code-block:: python
+
+            fc = fluid.layers.fc(input=x, size=10)
+            softmax = fluid.layers.softmax(input=fc)
+
+ """
1261
1300
helper = LayerHelper ('softmax' , ** locals ())
1262
1301
dtype = helper .input_dtype ()
1263
1302
softmax_out = helper .create_tmp_variable (dtype )
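
As a sanity check on the formula above, here is a minimal NumPy sketch (not part of this patch; the helper name `softmax_reference` and the sample input are illustrative) that computes the same row-wise ratio and verifies the two properties the docstring states: every output lies in [0, 1] and each row sums to 1.

.. code-block:: python

    import numpy as np

    def softmax_reference(x):
        """Row-wise softmax over a 2-D array of shape (N, K)."""
        # Subtract the per-row max before exponentiating; softmax is
        # shift-invariant per row, so this avoids overflow in exp()
        # without changing the result.
        shifted = x - x.max(axis=1, keepdims=True)
        exps = np.exp(shifted)
        # Out[i, j] = exp(X[i, j]) / sum_j exp(X[i, j]), as in the docstring.
        return exps / exps.sum(axis=1, keepdims=True)

    x = np.array([[1.0, 2.0, 3.0],
                  [0.0, 0.0, 0.0]])
    out = softmax_reference(x)
    print(out)              # every entry lies in [0, 1]
    print(out.sum(axis=1))  # each row sums to 1 -> [1. 1.]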