
Commit 2f6b529

refine code and comments, test=develop
1 parent 02d6805 commit 2f6b529

File tree

paddle/fluid/operators/hierarchical_sigmoid_op.cc
paddle/fluid/operators/math/matrix_bit_code.cc
python/paddle/fluid/layers/nn.py

3 files changed: 3 additions (+), 8 deletions (−)


paddle/fluid/operators/hierarchical_sigmoid_op.cc (1 addition, 1 deletion)

@@ -193,7 +193,7 @@ class HierarchicalSigmoidGradOpGradVarTypeInference
     block->Var(out_W_var_name)
         ->SetType(framework::proto::VarType::LOD_TENSOR);
     VLOG(3) << "hierarchical_sigmoid_grad op "
-            << framework::GradVarName("Bias") << " is set to SelectedRows";
+            << framework::GradVarName("Bias") << " is set to LoDTensor";
     block->Var(out_Bias_var_name)
         ->SetType(framework::proto::VarType::LOD_TENSOR);
   }

paddle/fluid/operators/math/matrix_bit_code.cc (0 additions, 6 deletions)

@@ -120,8 +120,6 @@ void MatrixBitCodeFunctor<T>::MulGradWeight(const framework::LoDTensor& tmat,
   size_t input_width = input.dims()[1];
   size_t tmat_width = tmat.dims()[1];
   size_t weight_width = weight->dims()[1];
-  VLOG(30) << "sparse w_grad dims is [" << weight->dims()[0] << " ,"
-           << weight->dims()[1] << " ]";
   auto tmat_value = tmat.data<T>();
   auto weight_value = weight->data<T>();
   auto input_value = input.data<T>();
@@ -147,8 +145,6 @@ void MatrixBitCodeFunctor<T>::MulGradWeight(const framework::LoDTensor& tmat,
   size_t input_width = input.dims()[1];
   size_t tmat_width = tmat.dims()[1];
   size_t weight_width = weight->value().dims()[1];
-  VLOG(30) << "sparse w_grad dims is: [" << weight->value().dims()[0] << " ,"
-           << weight->value().dims()[1] << " ]";
   auto tmat_value = tmat.data<T>();
   auto weight_value = weight->mutable_value()->data<T>();
   auto input_value = input.data<T>();
@@ -157,11 +153,9 @@ void MatrixBitCodeFunctor<T>::MulGradWeight(const framework::LoDTensor& tmat,
     int code_length = code->get_length();
     for (int j = 0; j < code_length; ++j) {
       size_t index = code->calc_index(j);
-
       for (size_t k = 0; k < input_width; ++k) {
         int64_t row_index =
             weight->AutoGrownIndex(static_cast<int64_t>(index), false, true);
-
         weight_value[row_index * weight_width + k] +=
             tmat_value[i * tmat_width + j] * input_value[input_width * i + k];
       }

python/paddle/fluid/layers/nn.py (2 additions, 1 deletion)

@@ -4581,7 +4581,8 @@ def hsigmoid(input,
             is not set, the bias is initialized zero. Default: None.
         name (str|None): A name for this layer(optional). If set None, the layer
             will be named automatically. Default: None.
-        is_costum: (bool|False)using user defined binary tree instead of default complete binary tree
+        is_costum: (bool|False)using user defined binary tree instead of default complete binary tree, if costum is
+            set you need to set ptable/pcode/non_leaf_num, otherwise num_classes should be set
         is_sparse: (bool|False)using sparse update instead of dense update
 
     Returns:
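
The docstring above distinguishes the default complete binary tree (driven by num_classes) from a user defined tree (driven by ptable/pcode/non_leaf_num when is_costum is set). As a rough illustration, a minimal usage sketch follows; it assumes the parameter names quoted in the docstring, and the exact fluid.layers.hsigmoid signature may differ between PaddlePaddle versions.

import paddle.fluid as fluid

# Minimal sketch, assuming the parameter names from the docstring above
# (num_classes, is_costum, is_sparse, ptable, pcode, non_leaf_num); not a
# definitive reference for the fluid.layers.hsigmoid signature.
x = fluid.layers.data(name='x', shape=[10], dtype='float32')
y = fluid.layers.data(name='y', shape=[1], dtype='int64')

# Default path: a complete binary tree is built over num_classes leaf nodes.
cost = fluid.layers.hsigmoid(input=x, label=y, num_classes=6)

# Custom-tree path (per the docstring): set is_costum=True and supply
# ptable/pcode/non_leaf_num instead of num_classes. is_sparse=True selects a
# sparse (SelectedRows) weight-gradient update, the path implemented by the
# MulGradWeight overloads touched in matrix_bit_code.cc above.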
