Skip to content

Commit c27554a

Browse files
authored
Merge pull request #14336 from jacquesqiao/add_bilinear_tensor_product_layer
add bilinear_tensor_product layer
2 parents 4d546f6 + 4d6f751 commit c27554a

File tree

3 files changed

+81
-0
lines changed

3 files changed

+81
-0
lines changed

paddle/fluid/API.spec

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -184,6 +184,7 @@ paddle.fluid.layers.hash ArgSpec(args=['input', 'hash_size', 'num_hash', 'name']
184184
paddle.fluid.layers.grid_sampler ArgSpec(args=['x', 'grid', 'name'], varargs=None, keywords=None, defaults=(None,))
185185
paddle.fluid.layers.log_loss ArgSpec(args=['input', 'label', 'epsilon', 'name'], varargs=None, keywords=None, defaults=(0.0001, None))
186186
paddle.fluid.layers.add_position_encoding ArgSpec(args=['input', 'alpha', 'beta', 'name'], varargs=None, keywords=None, defaults=(None,))
187+
paddle.fluid.layers.bilinear_tensor_product ArgSpec(args=['x', 'y', 'size', 'act', 'name', 'param_attr', 'bias_attr'], varargs=None, keywords=None, defaults=(None, None, None, None))
187188
paddle.fluid.layers.data ArgSpec(args=['name', 'shape', 'append_batch_size', 'dtype', 'lod_level', 'type', 'stop_gradient'], varargs=None, keywords=None, defaults=(True, 'float32', 0, VarType.LOD_TENSOR, True))
188189
paddle.fluid.layers.open_files ArgSpec(args=['filenames', 'shapes', 'lod_levels', 'dtypes', 'thread_num', 'buffer_size', 'pass_num', 'is_test'], varargs=None, keywords=None, defaults=(None, None, 1, None))
189190
paddle.fluid.layers.read_file ArgSpec(args=['reader'], varargs=None, keywords=None, defaults=None)

python/paddle/fluid/layers/nn.py

Lines changed: 70 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -165,6 +165,7 @@
165165
'grid_sampler',
166166
'log_loss',
167167
'add_position_encoding',
168+
'bilinear_tensor_product',
168169
]
169170

170171

@@ -8289,3 +8290,72 @@ def add_position_encoding(input, alpha, beta, name=None):
82898290
attrs={"alpha": alpha,
82908291
"beta": beta})
82918292
return out
8293+
8294+
8295+
def bilinear_tensor_product(x,
                            y,
                            size,
                            act=None,
                            name=None,
                            param_attr=None,
                            bias_attr=None):
    """
    **Add Bilinear Tensor Product Layer**

    This layer performs bilinear tensor product on two inputs.
    For example:

    .. math::
       out_{i} = x * W_{i} * {y^\mathrm{T}}, i=0,1,...,size-1

    In this formula:
    - :math:`x`: the first input contains M elements, shape is [batch_size, M].
    - :math:`y`: the second input contains N elements, shape is [batch_size, N].
    - :math:`W_{i}`: the i-th learned weight, shape is [M, N].
    - :math:`out_{i}`: the i-th element of out, shape is [batch_size, 1]; the
      full output has shape [batch_size, size].
    - :math:`y^\mathrm{T}`: the transpose of :math:`y`.

    Args:
        x (Variable): 2-D input tensor with shape [batch_size, M].
        y (Variable): 2-D input tensor with shape [batch_size, N].
        size (int): The dimension of this layer, i.e. the number of output
            units (and of learned [M, N] weight slices).
        act (str, default None): Activation to be applied to the output of
            this layer.
        name (str, default None): The name of this layer.
        param_attr (ParamAttr, default None): The parameter attribute for the
            learnable weights of this layer. Default: None.
        bias_attr (ParamAttr, default None): The parameter attribute for the
            bias of this layer. If it is set to False, no bias will be added
            to the output units. If it is set to None, the bias is initialized
            zero. Default: None.

    Returns:
        Variable: A 2-D Tensor of shape [batch_size, size].

    Examples:
        .. code-block:: python

          tensor = bilinear_tensor_product(x=layer1, y=layer2, size=1000)
    """
    helper = LayerHelper('bilinear_tensor_product', **locals())
    dtype = helper.input_dtype('x')

    # One learned [M, N] weight slice per output unit.
    param_shape = [size, x.shape[1], y.shape[1]]

    w = helper.create_parameter(
        attr=helper.param_attr, shape=param_shape, dtype=dtype, is_bias=False)

    if name is None:
        out = helper.create_variable_for_type_inference(dtype=dtype)
    else:
        out = helper.create_variable(name=name, dtype=dtype, persistable=False)

    inputs = {"X": x, "Y": y, "Weight": w}
    if helper.bias_attr:
        # Bias has a leading 1 so it broadcasts over the batch dimension.
        bias_size = [1, size]
        bias = helper.create_parameter(
            attr=helper.bias_attr, shape=bias_size, dtype=dtype, is_bias=True)
        inputs["Bias"] = bias
    helper.append_op(
        type="bilinear_tensor_product", inputs=inputs, outputs={"Out": out})

    # Apply the optional activation (no-op when act is None).
    return helper.append_activation(out)

python/paddle/fluid/tests/unittests/test_layers.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -911,6 +911,16 @@ def test_affine_grid(self):
911911
self.assertIsNotNone(data_1)
912912
print(str(program))
913913

914+
def test_bilinear_tensor_product_layer(self):
    """Build a program containing bilinear_tensor_product and sanity-check it."""
    program = Program()
    with program_guard(program):
        data = layers.data(name='data', shape=[4], dtype="float32")

        theta = layers.data(name="theta", shape=[5], dtype="float32")
        out = layers.bilinear_tensor_product(data, theta, 6)
        # The layer must return a Variable; same sanity check as the
        # sibling layer tests in this suite.
        self.assertIsNotNone(out)

    print(str(program))
923+
914924

915925
if __name__ == '__main__':
916926
unittest.main()

0 commit comments

Comments
 (0)