Skip to content

Commit 093c526

Browse files
authored
Merge pull request #5724 from ranqiu92/dot_product
add dot_prod_layer
2 parents 044d671 + 2e1cd33 commit 093c526

File tree

8 files changed

+222
-1
lines changed

8 files changed

+222
-1
lines changed

doc/api/v2/config/layer.rst

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -335,6 +335,16 @@ bilinear_interp
335335
.. autoclass:: paddle.v2.layer.bilinear_interp
336336
:noindex:
337337

338+
dot_prod
339+
---------
340+
.. autoclass:: paddle.v2.layer.dot_prod
341+
:noindex:
342+
343+
out_prod
344+
--------
345+
.. autoclass:: paddle.v2.layer.out_prod
346+
:noindex:
347+
338348
power
339349
-----
340350
.. autoclass:: paddle.v2.layer.power
Lines changed: 97 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,97 @@
1+
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
2+
3+
Licensed under the Apache License, Version 2.0 (the "License");
4+
you may not use this file except in compliance with the License.
5+
You may obtain a copy of the License at
6+
7+
http://www.apache.org/licenses/LICENSE-2.0
8+
9+
Unless required by applicable law or agreed to in writing, software
10+
distributed under the License is distributed on an "AS IS" BASIS,
11+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
See the License for the specific language governing permissions and
13+
limitations under the License. */
14+
15+
#include "Layer.h"
16+
#include "paddle/math/Matrix.h"
17+
#include "paddle/utils/Logging.h"
18+
#include "paddle/utils/Stat.h"
19+
20+
namespace paddle {
21+
22+
/**
23+
* @brief A layer for computing the dot product of two vectors.
24+
* Input1: vector (batchSize * dim)
25+
* Input2: vector (batchSize * dim)
26+
* Output: a matrix: (batchSize * 1)
27+
*/
28+
29+
class DotProdLayer : public Layer {
30+
public:
31+
explicit DotProdLayer(const LayerConfig& config) : Layer(config) {}
32+
33+
~DotProdLayer() {}
34+
35+
bool init(const LayerMap& layerMap,
36+
const ParameterMap& parameterMap) override;
37+
38+
void forward(PassType passType) override;
39+
void backward(const UpdateCallback& callback = nullptr) override;
40+
};
41+
42+
REGISTER_LAYER(dot_prod, DotProdLayer);
43+
44+
bool DotProdLayer::init(const LayerMap& layerMap,
45+
const ParameterMap& parameterMap) {
46+
Layer::init(layerMap, parameterMap);
47+
48+
CHECK_EQ(inputLayers_.size(), 2U);
49+
CHECK_EQ(1UL, getSize())
50+
<< "The output dimensionality of this layer should be fixed to 1.";
51+
52+
return true;
53+
}
54+
55+
void DotProdLayer::forward(PassType passType) {
56+
Layer::forward(passType);
57+
58+
MatrixPtr inV0 = getInputValue(0);
59+
MatrixPtr inV1 = getInputValue(1);
60+
61+
size_t batchSize = inV0->getHeight();
62+
CHECK_EQ(inV1->getHeight(), batchSize);
63+
CHECK_EQ(inV0->getWidth(), inV1->getWidth());
64+
65+
{
66+
REGISTER_TIMER_INFO("FwResetTimer", getName().c_str());
67+
reserveOutput(batchSize, 1);
68+
}
69+
70+
MatrixPtr outV = getOutputValue();
71+
{
72+
REGISTER_TIMER_INFO("FwDotProdTimer", getName().c_str());
73+
outV->sumOfProducts(*inV0, *inV1, 1, 0);
74+
}
75+
}
76+
77+
void DotProdLayer::backward(const UpdateCallback& callback) {
78+
MatrixPtr inV0 = getInputValue(0);
79+
MatrixPtr inV1 = getInputValue(1);
80+
MatrixPtr outG = getOutputGrad();
81+
MatrixPtr inG0 = getInputGrad(0);
82+
MatrixPtr inG1 = getInputGrad(1);
83+
84+
{
85+
REGISTER_TIMER_INFO("BwDotProdTimer", getName().c_str());
86+
87+
if (inG0) {
88+
inG0->addRowScale(0, *inV1, *outG);
89+
}
90+
91+
if (inG1) {
92+
inG1->addRowScale(0, *inV0, *outG);
93+
}
94+
}
95+
}
96+
97+
} // namespace paddle

paddle/gserver/tests/test_LayerGrad.cpp

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1081,6 +1081,21 @@ TEST(Layer, InterpolationLayer) {
10811081
}
10821082
}
10831083

1084+
TEST(Layer, DotProdLayer) {
  // Gradient check for dot_prod: two 10-dim data inputs, scalar output.
  const size_t inputDim = 10;

  TestConfig config;
  config.layerConfig.set_type("dot_prod");
  config.layerConfig.set_size(1);

  for (const auto& inputName : {"layer_0", "layer_1"}) {
    config.inputDefs.push_back({INPUT_DATA, inputName, inputDim, 0});
    config.layerConfig.add_inputs();
  }

  for (auto useGpu : {false, true}) {
    testLayerGrad(config, "dot_prod", 10, false, useGpu);
  }
}
1098+
10841099
TEST(Layer, OuterProdLayer) {
10851100
TestConfig config;
10861101
config.layerConfig.set_type("out_prod");

python/paddle/trainer/config_parser.py

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3209,6 +3209,18 @@ def __init__(self, name, inputs, selected_indices, bias=False, **xargs):
32093209
self.set_layer_size(size)
32103210

32113211

3212+
@config_layer('dot_prod')
class DotProdLayer(LayerBase):
    # Config for the dot_prod layer: takes exactly two equal-width inputs
    # and always produces a single scalar per sample.
    def __init__(self, name, inputs, device=None):
        super(DotProdLayer, self).__init__(
            name, 'dot_prod', 0, inputs, device=device)
        config_assert(len(inputs) == 2, 'DotProdLayer must have 2 inputs.')
        first_size = self.get_input_layer(0).size
        second_size = self.get_input_layer(1).size
        config_assert(first_size == second_size,
                      "Two inputs should have the same size.")
        self.set_layer_size(1)
3222+
3223+
32123224
@config_layer('out_prod')
32133225
class OuterProdLayer(LayerBase):
32143226
def __init__(self, name, inputs, device=None):

python/paddle/trainer_config_helpers/layers.py

Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -115,6 +115,7 @@
115115
'huber_classification_cost',
116116
'block_expand_layer',
117117
'maxout_layer',
118+
'dot_prod_layer',
118119
'out_prod_layer',
119120
'printer_layer',
120121
'print_layer',
@@ -197,6 +198,7 @@ class LayerType(object):
197198
SCALING_LAYER = 'scaling'
198199
TRANS_LAYER = 'trans'
199200
ROTATE_LAYER = 'rotate'
201+
DOT_PROD_LAYER = 'dot_prod'
200202
OUT_PROD_LAYER = 'out_prod'
201203
FEATURE_MAP_EXPAND_LAYER = 'featmap_expand'
202204

@@ -4140,6 +4142,45 @@ def maxid_layer(input, name=None, layer_attr=None):
41404142
size=l.config.size)
41414143

41424144

4145+
@wrap_name_default()
def dot_prod_layer(input1, input2, name=None, layer_attr=None):
    """
    A layer for computing the dot product of two vectors.

    The example usage is:

    .. code-block:: python

        dot_prod = dot_prod_layer(input1=vec1, input2=vec2)

    :param name: The name of this layer. It is optional.
    :type name: basestring
    :param input1: The first input layer.
    :type input1: LayerOutput
    :param input2: The second input layer.
    :type input2: LayerOutput
    :param layer_attr: The extra layer attribute. See ExtraLayerAttribute for
                       details.
    :type layer_attr: ExtraLayerAttribute.
    :return: LayerOutput object.
    :rtype: LayerOutput
    """
    assert isinstance(input1, LayerOutput)
    assert isinstance(input2, LayerOutput)
    # Dot product is only defined for vectors of equal dimensionality.
    assert input1.size == input2.size, ("Two inputs should have the same size.")

    l = Layer(
        name=name,
        type=LayerType.DOT_PROD_LAYER,
        inputs=[input1.name, input2.name],
        **ExtraLayerAttribute.to_kwargs(layer_attr))
    return LayerOutput(
        name=name,
        layer_type=LayerType.DOT_PROD_LAYER,
        parents=[input1, input2],
        size=l.config.size)
4182+
4183+
41434184
@wrap_name_default()
41444185
def out_prod_layer(input1, input2, name=None, layer_attr=None):
41454186
"""

python/paddle/trainer_config_helpers/tests/configs/file_list.sh

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@ test_prelu_layer test_row_conv test_detection_output_layer test_multibox_loss_la
1010
test_recursive_topology test_gated_unit_layer test_clip_layer test_row_l2_norm_layer
1111
test_kmax_seq_socre_layer test_sub_nested_seq_select_layer test_scale_shift_layer
1212
test_seq_slice_layer test_cross_entropy_over_beam test_roi_pool_layer test_pooling3D_layer
13-
test_conv3d_layer test_deconv3d_layer test_BatchNorm3D test_resize_layer test_scale_sub_region_layer)
13+
test_conv3d_layer test_deconv3d_layer test_BatchNorm3D test_resize_layer test_scale_sub_region_layer
14+
test_dot_prod_layer)
1415

1516
export whole_configs=(test_split_datasource)
Lines changed: 38 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,38 @@
1+
type: "nn"
2+
layers {
3+
name: "vector1"
4+
type: "data"
5+
size: 10
6+
active_type: ""
7+
}
8+
layers {
9+
name: "vector2"
10+
type: "data"
11+
size: 10
12+
active_type: ""
13+
}
14+
layers {
15+
name: "__dot_prod_layer_0__"
16+
type: "dot_prod"
17+
size: 1
18+
active_type: ""
19+
inputs {
20+
input_layer_name: "vector1"
21+
}
22+
inputs {
23+
input_layer_name: "vector2"
24+
}
25+
}
26+
input_layer_names: "vector1"
27+
input_layer_names: "vector2"
28+
output_layer_names: "__dot_prod_layer_0__"
29+
sub_models {
30+
name: "root"
31+
layer_names: "vector1"
32+
layer_names: "vector2"
33+
layer_names: "__dot_prod_layer_0__"
34+
input_layer_names: "vector1"
35+
input_layer_names: "vector2"
36+
output_layer_names: "__dot_prod_layer_0__"
37+
is_recurrent_layer_group: false
38+
}
Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
from paddle.trainer_config_helpers import *

# Two 10-dimensional input vectors; the network's sole output is their
# per-sample dot product.
first_vec = data_layer(name='vector1', size=10)
second_vec = data_layer(name='vector2', size=10)

outputs(dot_prod_layer(input1=first_vec, input2=second_vec))

0 commit comments

Comments
 (0)