@@ -4584,11 +4584,10 @@ def nce(input,
 
 def hsigmoid(input,
              label,
-             num_classes=None,
+             num_classes,
              param_attr=None,
              bias_attr=None,
              name=None,
-             non_leaf_num=None,
              path_table=None,
              path_code=None,
              is_custom=False,
@@ -4622,7 +4621,9 @@ def hsigmoid(input,
             and :math:`D` is the feature size.
         label (Variable): The tensor variable contains labels of training data.
             It's a tensor with shape is :math:`[N \\times 1]`.
-        num_classes: (int), The number of classes, must not be less than 2. with default tree this has to be set
+        num_classes: (int), The number of classes, must not be less than 2. With the default tree this has to be set
+            and must not be None when is_custom is False; when is_custom is True, it should be the number of
+            non-leaf nodes, i.e. the number of classes used by the binary classifiers.
         param_attr (ParamAttr|None): The parameter attribute for learnable parameters/weights
             of hsigmoid. If it is set to None or one attribute of ParamAttr, hsigmoid
             will create ParamAttr as param_attr. If the Initializer of the param_attr
@@ -4634,15 +4635,14 @@ def hsigmoid(input,
             is not set, the bias is initialized zero. Default: None.
         name (str|None): A name for this layer(optional). If set None, the layer
             will be named automatically. Default: None.
-        non_leaf_num: this defines the number of non-leaf nodes in costumed tree
         path_table: (Variable|None) this variable can store each batch of samples' path to root,
             it should be in leaf -> root order
             path_table should have the same shape with path_code, and for each sample i path_table[i] indicates a np.array like
             structure and each element in this array is indexes in parent nodes' Weight Matrix.
         path_code: (Variable|None) this variable can store each batch of samples' code,
             each code consist with every code of parent nodes. it should be in leaf -> root order
         is_custom: (bool|False)using user defined binary tree instead of default complete binary tree, if costum is
-            set you need to set path_table/path_code/non_leaf_num, otherwise num_classes should be set
+            set you need to set path_table/path_code/num_classes, otherwise num_classes should be set
         is_sparse: (bool|False)using sparse update instead of dense update, if set, the gradient
             of W and input will be sparse.
 
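The docstring hunks above describe two calling modes for the layer. As a rough usage sketch after this change (assuming this is the fluid.layers API, consistent with the surrounding nce/hsigmoid definitions; the variable names, shapes, and dtypes below are illustrative assumptions, not taken from the PR):

import paddle.fluid as fluid

# Assumed feature/label variables for illustration.
x = fluid.layers.data(name='x', shape=[8], dtype='float32')
y = fluid.layers.data(name='y', shape=[1], dtype='int64')

# Default complete binary tree: num_classes is the real number of classes.
out_default = fluid.layers.hsigmoid(input=x, label=y, num_classes=6)

# Custom tree: path_table/path_code hold each sample's leaf->root path and codes,
# and num_classes now plays the role of the removed non_leaf_num (non-leaf count).
path_table = fluid.layers.data(name='path_table', shape=[3], dtype='int64')
path_code = fluid.layers.data(name='path_code', shape=[3], dtype='int64')
out_custom = fluid.layers.hsigmoid(
    input=x,
    label=y,
    num_classes=5,
    path_table=path_table,
    path_code=path_code,
    is_custom=True)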
@@ -4671,8 +4671,8 @@ def hsigmoid(input,
         raise ValueError("path_code should not be None with costum tree")
     elif (is_custom) and (path_table is None):
         raise ValueError("path_table should not be None with costum tree")
-    elif (is_custom) and (non_leaf_num is None):
-        raise ValueError("non_leaf_num should not be None with costum tree")
+    elif (is_custom) and (num_classes is None):
+        raise ValueError("num_classes should not be None with costum tree")
     else:
         pass
 
@@ -4687,7 +4687,7 @@ def hsigmoid(input,
     else:
         weights = helper.create_parameter(
             attr=helper.param_attr,
-            shape=[non_leaf_num, dim],
+            shape=[num_classes, dim],
             is_bias=False,
             dtype=input.dtype)
     inputs = {
@@ -4708,7 +4708,7 @@ def hsigmoid(input,
     else:
         bias = helper.create_parameter(
             attr=helper.bias_attr,
-            shape=[non_leaf_num, 1],
+            shape=[num_classes, 1],
             is_bias=True,
             dtype=input.dtype)
     inputs['Bias'] = bias
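For intuition about the shapes touched in the last two hunks, here is a toy NumPy sketch (sizes and values are made up, and this is not the op's actual implementation): each row of path_table selects rows of the [num_classes, dim] weight matrix and the [num_classes, 1] bias, and path_code supplies the binary targets for the classifiers along the path.

import numpy as np

# Toy sizes, chosen only for illustration; not taken from the PR.
num_classes = 3   # number of non-leaf nodes in the custom tree (new num_classes semantics)
dim = 4           # feature size D
batch = 2

w = np.random.rand(num_classes, dim)   # analogous to shape=[num_classes, dim] above
b = np.random.rand(num_classes, 1)     # analogous to shape=[num_classes, 1] above
x = np.random.rand(batch, dim)

path_table = np.array([[0, 2], [1, 2]])   # leaf -> root node indexes per sample
path_code = np.array([[1, 0], [0, 1]])    # binary code at each visited node

for i in range(batch):
    nodes = path_table[i]                      # rows of w visited by sample i
    logits = w[nodes] @ x[i] + b[nodes, 0]     # one binary classifier per node
    probs = 1.0 / (1.0 + np.exp(-logits))      # sigmoid per node
    # binary cross-entropy of the codes along the path
    loss = -np.sum(path_code[i] * np.log(probs) +
                   (1 - path_code[i]) * np.log(1 - probs))
    print(loss)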