Skip to content

Commit 0b76c73

Browse files
authored
AddBiasOp does not care about num_flatten_dims (#5200)
* AddBiasOp does not care num_flatten_dims * Add comments
1 parent 2e91c7d commit 0b76c73

File tree

1 file changed

+18
-2
lines changed

1 file changed

+18
-2
lines changed

python/paddle/v2/framework/layer_helper.py

Lines changed: 18 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -142,8 +142,24 @@ def create_global_variable(self, *args, **kwargs):
142142
return self.program.global_block().create_var(
143143
*args, persistable=False, **kwargs)
144144

145-
def append_bias_op(self, input_var):
146-
size = list(input_var.shape[1:])
145+
def append_bias_op(self, input_var, num_flatten_dims=None):
146+
"""
147+
Append bias operator and return its output. If the user does not set
148+
bias_attr, append_bias_op will return input_var
149+
150+
:param input_var: the input variable. len(input_var.shape) must be greater
151+
than or equal to 2.
152+
:param num_flatten_dims: The input tensor will be flattened as a matrix
153+
when adding bias.
154+
`matrix.shape = product(input_var.shape[0:num_flatten_dims]), product(
155+
input_var.shape[num_flatten_dims:])`
156+
"""
157+
if num_flatten_dims is None:
158+
num_flatten_dims = self.kwargs.get('num_flatten_dims', None)
159+
if num_flatten_dims is None:
160+
num_flatten_dims = 1
161+
162+
size = list(input_var.shape[num_flatten_dims:])
147163
bias_attr = self.bias_attr()
148164
if not bias_attr:
149165
return input_var

0 commit comments

Comments
 (0)