
Commit 5392a50

Merge remote-tracking branch 'upstream/develop' into factorization_machine_layer
2 parents: d5a6c81 + 6da78d9

File tree

21 files changed: +812 −47 lines


doc/api/v2/config/layer.rst

Lines changed: 5 additions & 0 deletions
@@ -382,6 +382,11 @@ cos_sim
 .. autoclass:: paddle.v2.layer.cos_sim
     :noindex:
 
+l2_distance
+-----------
+.. autoclass:: paddle.v2.layer.l2_distance
+    :noindex:
+
 trans
 -----
 .. autoclass:: paddle.v2.layer.trans

paddle/framework/backward.cc

Lines changed: 3 additions & 8 deletions
@@ -513,19 +513,14 @@ ParamGradInfoMap AppendBackward(
   const int root_block_idx = 0;
   auto root_block = program_desc.MutableBlock(root_block_idx);
 
-  // insert fill one op for target
-  // TODO(qiao) add some check to the target.
   std::string fill_one_op_out = GradVarName(target.Name());
-  std::vector<int64_t> target_shape_desc = target.Shape();
-  std::vector<int> target_shape;
-  std::transform(target_shape_desc.begin(), target_shape_desc.end(),
-                 std::back_inserter(target_shape),
-                 [](int64_t dim) { return static_cast<int>(dim); });
+  bool is_scalar = target.Shape() == std::vector<int64_t>{1};
+  PADDLE_ENFORCE(is_scalar, "target should be scalar");
   VLOG(3) << "backward from loss=" << target.Name()
           << " data_type=" << target.GetDataType();
   std::unique_ptr<OpDescBind> fill_one_op(
       new OpDescBind("fill_constant", {}, {{"Out", {fill_one_op_out}}},
-                     {{"shape", target_shape},
+                     {{"shape", std::vector<int>{1}},
                       {"value", static_cast<float>(1.0)},
                       {"data_type", target.GetDataType()}}));
   // infer var type of fill_one_op
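The net effect of this hunk: AppendBackward no longer copies the target's shape into the fill_constant op; it now requires the loss target to be a scalar (shape {1}) and always seeds its gradient with a single 1.0. A minimal standalone sketch of the new precondition, assuming nothing beyond the standard library (IsScalarShape is an illustrative name, not Paddle code):

#include <cassert>
#include <cstdint>
#include <vector>

// Illustrative helper: the enforced precondition reduces to this shape check.
bool IsScalarShape(const std::vector<int64_t>& shape) {
  return shape == std::vector<int64_t>{1};  // a loss must be a single element
}

int main() {
  assert(IsScalarShape({1}));      // accepted: scalar loss target
  assert(!IsScalarShape({3, 4}));  // rejected by PADDLE_ENFORCE in the diff
  return 0;
}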

paddle/framework/backward_test.cc

Lines changed: 7 additions & 0 deletions
@@ -508,6 +508,7 @@ TEST(Backward, simple_single_op) {
   op->SetOutput("Out", {"out"});
 
   auto target = f::VarDescBind("out");
+  target.SetShape({1});
   auto var_to_grad = AppendBackward(program, target, {});
 
   ASSERT_EQ(block->AllOps().size(), 3UL);
@@ -544,6 +545,7 @@ TEST(Backward, default_attribute) {
   op->CheckAttrs();
 
   auto target = f::VarDescBind("out");
+  target.SetShape({1});
   AppendBackward(program, target, {});
 
   ASSERT_EQ(block->AllOps().size(), 3UL);
@@ -581,6 +583,7 @@ TEST(Backward, simple_mult_op) {
   op3->SetOutput("Out", {"out3"});
 
   auto target = f::VarDescBind("out3");
+  target.SetShape({1});
   size_t forward_len = block->AllOps().size();
   auto var_to_grad = AppendBackward(program, target, {});
 
@@ -670,6 +673,7 @@ TEST(Backward, intermedia_var_no_grad) {
   op4->SetOutput("Out", {"out4"});
 
   auto target = f::VarDescBind("out4");
+  target.SetShape({1});
   size_t forward_len = block->AllOps().size();
   auto var_to_grad = AppendBackward(program, target, {"out3"});
 
@@ -730,6 +734,7 @@ TEST(Backward, var_no_grad) {
   op2->SetOutput("Z", {"z2"});
 
   auto target = f::VarDescBind("z2");
+  target.SetShape({1});
   size_t forward_len = block->AllOps().size();
   auto var_to_grad = AppendBackward(program, target, {"z1"});
 
@@ -810,6 +815,7 @@ TEST(Backward, shared_var) {
   op3->SetOutput("Out", {"out3"});
 
   auto target = f::VarDescBind("out3");
+  target.SetShape({1});
   size_t forward_len = block->AllOps().size();
   auto var_to_grad = AppendBackward(program, target, {});
 
@@ -888,6 +894,7 @@ TEST(Backward, half_backward) {
   op1->SetOutput("Out", {"out"});
 
   auto target = f::VarDescBind("out");
+  target.SetShape({1});
   size_t forward_len = block->AllOps().size();
   auto var_to_grad = AppendBackward(program, target, {"b"});
   f::OpDescBind *fill_op = block->AllOps()[forward_len];

paddle/framework/data_type.h

Lines changed: 5 additions & 0 deletions
@@ -46,6 +46,8 @@ inline std::type_index ToTypeIndex(DataType type) {
       return typeid(int);
     case DataType::INT64:
       return typeid(int64_t);
+    case DataType::BOOL:
+      return typeid(bool);
     default:
      PADDLE_THROW("Not support type %d", type);
   }
@@ -66,6 +68,9 @@ inline void VisitDataType(DataType type, Visitor visitor) {
     case DataType::INT64:
       visitor.template operator()<int64_t>();
      break;
+    case DataType::BOOL:
+      visitor.template operator()<bool>();
+      break;
     default:
       PADDLE_THROW("Not supported");
   }
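Both hunks extend the same runtime-to-compile-time dispatch pattern to bool. A self-contained sketch of that pattern (names mirror data_type.h for readability, but this is not Paddle code):

#include <cstdint>
#include <cstdio>

enum class DataType { FP32, INT64, BOOL };

// Dispatch a templated functor on a runtime type tag; the diff adds the
// DataType::BOOL branch to the existing switch.
template <typename Visitor>
void VisitDataType(DataType type, Visitor visitor) {
  switch (type) {
    case DataType::FP32:
      visitor.template operator()<float>();
      break;
    case DataType::INT64:
      visitor.template operator()<int64_t>();
      break;
    case DataType::BOOL:
      visitor.template operator()<bool>();
      break;
  }
}

struct PrintSize {
  template <typename T>
  void operator()() const {
    std::printf("sizeof = %zu\n", sizeof(T));
  }
};

int main() {
  VisitDataType(DataType::BOOL, PrintSize{});  // prints sizeof = 1
  return 0;
}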
paddle/gserver/layers/L2DistanceLayer.cpp

Lines changed: 91 additions & 0 deletions
@@ -0,0 +1,91 @@
+/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License. */
+
+#include "L2DistanceLayer.h"
+#include "paddle/utils/Logging.h"
+#include "paddle/utils/Stat.h"
+
+namespace paddle {
+
+REGISTER_LAYER(l2_distance, L2DistanceLayer);
+
+bool L2DistanceLayer::init(const LayerMap& layerMap,
+                           const ParameterMap& parameterMap) {
+  /* Initialize the basic parent class */
+  Layer::init(layerMap, parameterMap);
+
+  CHECK_EQ(inputLayers_.size(), 2UL) << "The L2DistanceLayer accepts two and "
+                                     << "only two inputs.";
+  CHECK_EQ(getSize(), 1UL) << "The output dimensionality of L2DistanceLayer "
+                           << "is fixed to be 1.";
+
+  return true;
+}
+
+void L2DistanceLayer::forward(PassType passType) {
+  Layer::forward(passType);
+
+  const auto inV1 = getInputValue(0);
+  const auto inV2 = getInputValue(1);
+
+  CHECK(inV1 && inV2);
+  CHECK_EQ(inV1->getHeight(), inV2->getHeight())
+      << "The height of two inputs of this layer must be the same.";
+  CHECK_EQ(inV1->getWidth(), inV2->getWidth())
+      << "The width of two inputs of this layer must be the same.";
+
+  int batchSize = inV1->getHeight();
+  int output_dim = getSize();
+  {
+    REGISTER_TIMER_INFO("L2DistanceBpAtvTimer", getName().c_str());
+    reserveOutput(batchSize, output_dim);
+    auto outV = getOutputValue();
+    CHECK(outV) << "The output matrix should not be null.";
+
+    Matrix::resizeOrCreate(
+        inputSub_, inV1->getHeight(), inV1->getWidth(), false, useGpu_);
+
+    inputSub_->assign(*inV1);
+    inputSub_->sub(*inV2);
+    outV->sumOfProducts(*inputSub_, *inputSub_, 1, 0);
+    outV->sqrt2(*outV);
+  }
+}
+
+void L2DistanceLayer::backward(const UpdateCallback& callback) {
+  const auto outG = getOutputGrad();
+  const auto outV = getOutputValue();
+  CHECK(outG && outV);
+
+  auto inGrad1 = getInputGrad(0);
+  auto inGrad2 = getInputGrad(1);
+
+  {
+    REGISTER_TIMER_INFO("L2DistanceBpAtvTimer", getName().c_str());
+
+    if (inGrad1 || inGrad2) {
+      outV->scalarDiv(*outV, 1.);
+      outV->dotMul(*outG, *outV);
+    }
+
+    if (inGrad1) inGrad1->addRowScale(0, *inputSub_, *outV);
+
+    if (inGrad2) {
+      inputSub_->mulScalar(-1.);
+      inGrad2->addRowScale(0, *inputSub_, *outV);
+    }
+  }
+}
+
+}  // namespace paddle
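The backward pass above follows directly from differentiating the distance. A sketch of the math it implements, with d = \sqrt{\sum_i (x_i - y_i)^2} and g = \partial L / \partial d the upstream gradient:

\frac{\partial L}{\partial x_i} = g \cdot \frac{x_i - y_i}{d}, \qquad
\frac{\partial L}{\partial y_i} = -g \cdot \frac{x_i - y_i}{d}

In the code, scalarDiv stores 1/d in outV, dotMul multiplies it by outG to form the per-row factor g/d, and addRowScale scales each row of inputSub_ = x − y by that factor, with inputSub_ negated via mulScalar for the gradient of the second input.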
paddle/gserver/layers/L2DistanceLayer.h

Lines changed: 52 additions & 0 deletions
@@ -0,0 +1,52 @@
+/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License. */
+
+#pragma once
+
+#include "Layer.h"
+#include "paddle/math/Matrix.h"
+
+namespace paddle {
+
+/**
+ * @brief The layer calculates the l2 distance between two input vectors.
+ * \f[
+ * f(\bf{x}, \bf{y}) = \sqrt{\sum_{i=1}^D(x_i - y_i)^2}
+ * \f]
+ *
+ * - Input1: A vector (batchSize * dataDim)
+ * - Input2: A vector (batchSize * dataDim)
+ * - Output: A vector (batchSize * 1)
+ *
+ * The configuration api is: l2_distance_layer.
+ */
+
+class L2DistanceLayer : public Layer {
+public:
+  explicit L2DistanceLayer(const LayerConfig& config) : Layer(config) {}
+  ~L2DistanceLayer() {}
+
+  bool init(const LayerMap& layerMap,
+            const ParameterMap& parameterMap) override;
+
+  void forward(PassType passType) override;
+  void backward(const UpdateCallback& callback = nullptr) override;
+
+private:
+  // Store the result of subtracting Input2 from Input1 in forward computation,
+  // which will be reused in backward computation.
+  MatrixPtr inputSub_;
+};
+
+}  // namespace paddle
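A standalone sketch of what the layer computes per batch row (plain C++ with illustrative values, not Paddle code):

#include <cmath>
#include <cstdio>
#include <vector>

int main() {
  // Two inputs of shape batchSize x dataDim; output has shape batchSize x 1.
  const std::vector<std::vector<double>> x = {{1, 2, 3}, {0, 0, 0}};
  const std::vector<std::vector<double>> y = {{1, 0, 3}, {3, 4, 0}};

  for (size_t b = 0; b < x.size(); ++b) {
    double sum = 0;
    for (size_t i = 0; i < x[b].size(); ++i) {
      const double diff = x[b][i] - y[b][i];
      sum += diff * diff;  // cf. sumOfProducts(inputSub_, inputSub_)
    }
    std::printf("out[%zu] = %g\n", b, std::sqrt(sum));  // cf. sqrt2
  }
  return 0;  // prints out[0] = 2 and out[1] = 5
}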

paddle/gserver/tests/test_LayerGrad.cpp

Lines changed: 20 additions & 0 deletions
@@ -583,6 +583,7 @@ TEST(Layer, maxoutLayer) {
     testLayerGrad(config, "maxout", 10, false, useGpu);
   }
 }
+
 void testFcLayer(string format, size_t nnz) {
   TestConfig config;
   config.biasSize = 1024;
@@ -2444,6 +2445,25 @@ TEST(Layer, ScaleSubRegionLayer) {
   }
 }
 
+TEST(Layer, L2DistanceLayer) {
+  TestConfig config;
+  config.layerConfig.set_type("l2_distance");
+  config.layerConfig.set_size(1);
+  config.biasSize = 0;
+
+  const size_t input_dim = 27;
+  const size_t batch_size = 11;
+
+  config.inputDefs.push_back({INPUT_DATA, "layer_0", input_dim, 0});
+  config.inputDefs.push_back({INPUT_DATA, "layer_1", input_dim, 0});
+  config.layerConfig.add_inputs();
+  config.layerConfig.add_inputs();
+
+  for (auto useGpu : {false, true}) {
+    testLayerGrad(config, "l2_distance", batch_size, false, useGpu);
+  }
+}
+
 void testFactorizationMachineLayer(InputType type, bool useGpu) {
   const int FACTOR_SIZE = 10;
   TestConfig config;
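testLayerGrad verifies the layer's analytic backward pass against a numeric gradient. A minimal sketch of the underlying idea on a scalar toy loss (numeric_grad and loss are illustrative names, not the test framework's API):

#include <cmath>
#include <cstdio>

// Central finite difference: numeric slope of f at x.
static double numeric_grad(double (*f)(double), double x, double eps = 1e-4) {
  return (f(x + eps) - f(x - eps)) / (2 * eps);
}

// Toy scalar analogue of the l2 distance to a fixed point.
static double loss(double x) { return std::sqrt((x - 2.0) * (x - 2.0) + 1.0); }

int main() {
  const double x = 0.5;
  const double analytic = (x - 2.0) / loss(x);  // d/dx sqrt((x-2)^2 + 1)
  std::printf("numeric=%.6f analytic=%.6f\n", numeric_grad(loss, x), analytic);
  return 0;  // the two values agree to within O(eps^2)
}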

paddle/operators/CMakeLists.txt

Lines changed: 5 additions & 0 deletions
@@ -87,6 +87,11 @@ function(op_library TARGET)
     file(APPEND ${pybind_file} "USE_OP(pool2d_cudnn);\n")
   endif()
 
+  if ("${TARGET}" STREQUAL "logical_op")
+    set(pybind_flag 1)
+    file(APPEND ${pybind_file} "USE_OP(logical_and);\n")
+  endif()
+
   # pool_with_index_op contains several operators
   if ("${TARGET}" STREQUAL "pool_with_index_op")
     set(pybind_flag 1)
