Skip to content

Commit b1fd62f

Browse files

Merge commit (test=develop) — 2 parents: 8f07f60 + b717a32

39 files changed

+667
-483
lines changed

paddle/fluid/framework/framework.proto

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -80,7 +80,6 @@ message OpProto {
8080
optional bool duplicable = 3 [ default = false ];
8181
optional bool intermediate = 4 [ default = false ];
8282
optional bool dispensable = 5 [ default = false ];
83-
optional string reuse = 6;
8483
}
8584

8685
// AttrProto describes the C++ type Attribute.

paddle/fluid/framework/op_proto_maker.cc

Lines changed: 0 additions & 53 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,6 @@ namespace framework {
2121
void OpProtoAndCheckerMaker::Validate() {
2222
validated_ = true;
2323
CheckNoDuplicatedInOutAttrs();
24-
CheckReuseVars();
2524
}
2625

2726
OpProtoAndCheckerMaker::VariableBuilder OpProtoAndCheckerMaker::AddInput(
@@ -40,40 +39,6 @@ OpProtoAndCheckerMaker::VariableBuilder OpProtoAndCheckerMaker::AddOutput(
4039
return OpProtoAndCheckerMaker::VariableBuilder{output};
4140
}
4241

43-
void OpProtoAndCheckerMaker::Reuse(const std::string& name,
44-
const std::string& reused_name) {
45-
bool found = false;
46-
proto::OpProto::Var* var;
47-
48-
for (auto& var : proto_->inputs()) {
49-
if (var.name() == reused_name) {
50-
found = true;
51-
break;
52-
}
53-
}
54-
PADDLE_ENFORCE(found == true,
55-
"Input/Output name: %s reused_name: %s, one of them is not "
56-
"exists or not matched.",
57-
name, reused_name);
58-
59-
found = false;
60-
for (int i = 0; i < proto_->outputs().size(); ++i) {
61-
var = proto_->mutable_outputs()->Mutable(i);
62-
if (var->name() == name) {
63-
PADDLE_ENFORCE(!var->has_reuse(),
64-
"Output(%s) has been set reused var of %s", name,
65-
var->reuse());
66-
found = true;
67-
var->set_reuse(reused_name);
68-
break;
69-
}
70-
}
71-
PADDLE_ENFORCE(found == true,
72-
"Input/Output name: %s reused_name: %s, one of them is not "
73-
"exists or not matched.",
74-
name, reused_name);
75-
}
76-
7742
void OpProtoAndCheckerMaker::CheckNoDuplicatedInOutAttrs() {
7843
std::unordered_set<std::string> names;
7944
auto checker = [&](const std::string& name) {
@@ -91,24 +56,6 @@ void OpProtoAndCheckerMaker::CheckNoDuplicatedInOutAttrs() {
9156
}
9257
}
9358

94-
void OpProtoAndCheckerMaker::CheckReuseVars() {
95-
std::unordered_set<std::string> names;
96-
for (auto& input : proto_->inputs()) {
97-
names.insert(input.name());
98-
}
99-
auto checker = [&](const std::string& name, const std::string& reused) {
100-
PADDLE_ENFORCE(
101-
names.count(reused),
102-
"Output [%s] reuse Input [%s], but the input is not registered.", name,
103-
reused);
104-
};
105-
for (auto& output : proto_->outputs()) {
106-
if (output.has_reuse()) {
107-
checker(output.name(), output.reuse());
108-
}
109-
}
110-
}
111-
11259
void OpProtoAndCheckerMaker::operator()(proto::OpProto* proto,
11360
OpAttrChecker* attr_checker) {
11461
proto_ = proto;

paddle/fluid/framework/op_proto_maker.h

Lines changed: 0 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -14,8 +14,6 @@ limitations under the License. */
1414
#pragma once
1515

1616
#include <string>
17-
#include <unordered_set>
18-
1917
#include "glog/logging.h"
2018
#include "paddle/fluid/framework/attribute.h"
2119
#include "paddle/fluid/framework/framework.pb.h"
@@ -73,20 +71,13 @@ class OpProtoAndCheckerMaker {
7371
var_->set_dispensable(true);
7472
return *this;
7573
}
76-
77-
VariableBuilder &Reuse(const std::string &name) {
78-
var_->set_reuse(name);
79-
return *this;
80-
}
8174
};
8275

8376
VariableBuilder AddInput(const std::string &name, const std::string &comment);
8477

8578
VariableBuilder AddOutput(const std::string &name,
8679
const std::string &comment);
8780

88-
void Reuse(const std::string &name, const std::string &reused_name);
89-
9081
template <typename T>
9182
TypedAttrChecker<T> &AddAttr(const std::string &name,
9283
const std::string &comment,
@@ -105,8 +96,6 @@ class OpProtoAndCheckerMaker {
10596
void CheckNoDuplicatedInOutAttrs();
10697
void Validate();
10798

108-
void CheckReuseVars();
109-
11099
proto::OpProto *proto_;
111100
OpAttrChecker *op_checker_;
112101
bool validated_{false};

paddle/fluid/framework/op_proto_maker_test.cc

Lines changed: 0 additions & 117 deletions
Original file line numberDiff line numberDiff line change
@@ -47,120 +47,3 @@ TEST(ProtoMaker, DuplicatedInOut) {
4747
ASSERT_THROW(proto_maker(&op_proto, &op_checker),
4848
paddle::platform::EnforceNotMet);
4949
}
50-
51-
class TestInplaceProtoMaker : public paddle::framework::OpProtoAndCheckerMaker {
52-
public:
53-
void Make() {
54-
AddInput("X", "input of test op");
55-
AddOutput("XOut", "output of test op").Reuse("X");
56-
}
57-
};
58-
59-
class TestInplaceProtoMaker2
60-
: public paddle::framework::OpProtoAndCheckerMaker {
61-
public:
62-
void Make() {
63-
AddInput("X", "input of test op");
64-
AddOutput("XOut", "output of test op").Reuse("X");
65-
AddOutput("NoOut", "output of test op").Reuse("NotExists");
66-
}
67-
};
68-
69-
TEST(ProtoMaker, InplaceOutput) {
70-
paddle::framework::proto::OpProto op_proto, op_proto2;
71-
paddle::framework::OpAttrChecker op_checker;
72-
TestInplaceProtoMaker proto_maker;
73-
TestInplaceProtoMaker2 proto_maker2;
74-
75-
proto_maker(&op_proto, &op_checker);
76-
77-
ASSERT_THROW(proto_maker2(&op_proto2, &op_checker),
78-
paddle::platform::EnforceNotMet);
79-
}
80-
81-
// normal reuse
82-
class TestReuseProtoMaker : public paddle::framework::OpProtoAndCheckerMaker {
83-
public:
84-
void Make() {
85-
AddInput("X", "input of test op");
86-
AddInput("Y", "input of test op");
87-
AddOutput("Out", "output of test op");
88-
AddOutput("XOut", "output of test op");
89-
// avoid destructor exception.
90-
// Validate();
91-
TestReuse();
92-
}
93-
94-
virtual void TestReuse() {}
95-
};
96-
97-
// test duplicate reuse error
98-
class TestReuseProtoMaker2 : public TestReuseProtoMaker {
99-
public:
100-
void TestReuse() {
101-
Reuse("Out", "X");
102-
Reuse("Out", "Y");
103-
}
104-
};
105-
106-
// NotExists Input
107-
class TestReuseProtoMaker3 : public TestReuseProtoMaker {
108-
public:
109-
void TestReuse() {
110-
Reuse("Out", "NotExists");
111-
Reuse("XOut", "X");
112-
}
113-
};
114-
115-
// NotExists Output
116-
class TestReuseProtoMaker4 : public TestReuseProtoMaker {
117-
public:
118-
void TestReuse() { Reuse("NotExists", "X"); }
119-
};
120-
121-
TEST(ProtoMaker, Reuse) {
122-
paddle::framework::proto::OpProto op_proto;
123-
paddle::framework::OpAttrChecker op_checker;
124-
TestReuseProtoMaker proto_maker;
125-
proto_maker(&op_proto, &op_checker);
126-
}
127-
128-
// NOTE(dzhwinter):
129-
// There is a Fatal CHECK on base class destructor, which will call abort inside
130-
// instead of
131-
// throw an exception. If we throw an exception in Make(), we will trigger the
132-
// CHECK and terminate the tests.
133-
//
134-
// I had tried to replace the default CHECK with a exception, however, it's
135-
// still not supported by glog.
136-
// the details:
137-
// https://github.com/google/glog/issues/249
138-
// https://github.com/facebookresearch/TensorComprehensions/issues/351
139-
/*
140-
TEST(ProtoMaker, ReuseWithException) {
141-
paddle::framework::proto::OpProto op_proto2, op_proto3, op_proto4;
142-
paddle::framework::OpAttrChecker op_checker;
143-
TestReuseProtoMaker2 proto_maker2;
144-
TestReuseProtoMaker3 proto_maker3;
145-
TestReuseProtoMaker4 proto_maker4;
146-
EXPECT_THROW(proto_maker2(&op_proto2, &op_checker),
147-
paddle::platform::EnforceNotMet);
148-
149-
EXPECT_THROW(proto_maker3(&op_proto3, &op_checker),
150-
paddle::platform::EnforceNotMet);
151-
152-
EXPECT_THROW(proto_maker4(&op_proto4, &op_checker),
153-
paddle::platform::EnforceNotMet);
154-
}
155-
156-
void FailureFunction() {
157-
throw std::runtime_error("Check failed in destructor.");
158-
// return 0;
159-
}
160-
161-
int main(int argc, char** argv) {
162-
testing::InitGoogleTest(&argc, argv);
163-
google::InstallFailureFunction(&FailureFunction);
164-
return RUN_ALL_TESTS();
165-
}
166-
*/

paddle/fluid/framework/parallel_executor.cc

Lines changed: 0 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -156,14 +156,6 @@ ParallelExecutor::ParallelExecutor(
156156
params, member_->local_scopes_, member_->use_cuda_);
157157
#endif
158158

159-
if (VLOG_IS_ON(5)) {
160-
// If the loss_var_name is given, the number of graph should be only one.
161-
if (loss_var_name.size()) {
162-
PADDLE_ENFORCE_EQ(ir::GraphNum(*graph), 1,
163-
"The number of graph should be only one");
164-
}
165-
}
166-
167159
if (exec_strategy.type_ == ExecutionStrategy::kDefault) {
168160
member_->executor_.reset(new details::ThreadedSSAGraphExecutor(
169161
exec_strategy, member_->local_scopes_, places, std::move(graph)));

paddle/fluid/inference/api/demo_ci/run.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ else
2121
fi
2222

2323
USE_TENSORRT=OFF
24-
if [ [-d"$TENSORRT_INCLUDE_DIR"] -a [-d"$TENSORRT_LIB_DIR"] ]; then
24+
if [ -d "$TENSORRT_INCLUDE_DIR" -a -d "$TENSORRT_LIB_DIR" ]; then
2525
USE_TENSORRT=ON
2626
fi
2727

paddle/fluid/inference/tensorrt/convert/pool2d_op.cc

Lines changed: 40 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -42,16 +42,22 @@ class Pool2dOpConverter : public OpConverter {
4242
boost::get<std::vector<int>>(op_desc.GetAttr("strides"));
4343
std::vector<int> paddings =
4444
boost::get<std::vector<int>>(op_desc.GetAttr("paddings"));
45+
bool ceil_mode = boost::get<bool>(op_desc.GetAttr("ceil_mode"));
4546

47+
nvinfer1::Dims input_shape = input1->getDimensions();
48+
int nbDims = input_shape.nbDims;
4649
nvinfer1::DimsHW nv_ksize(ksize[0], ksize[1]);
50+
nvinfer1::DimsHW nv_strides(strides[0], strides[1]);
51+
nvinfer1::DimsHW nv_paddings(paddings[0], paddings[1]);
52+
4753
if (global_pooling == true) {
48-
nvinfer1::Dims input_shape = input1->getDimensions();
49-
int nbDims = input_shape.nbDims;
5054
nv_ksize.d[0] = input_shape.d[nbDims - 2];
5155
nv_ksize.d[1] = input_shape.d[nbDims - 1];
56+
nv_strides.h() = 1;
57+
nv_strides.w() = 1;
58+
nv_paddings.h() = 0;
59+
nv_paddings.w() = 0;
5260
}
53-
const nvinfer1::DimsHW nv_strides(strides[0], strides[1]);
54-
const nvinfer1::DimsHW nv_paddings(paddings[0], paddings[1]);
5561

5662
PADDLE_ENFORCE_EQ(input1->getDimensions().nbDims, 3UL);
5763

@@ -64,6 +70,36 @@ class Pool2dOpConverter : public OpConverter {
6470
PADDLE_THROW("TensorRT unsupported pooling type!");
6571
}
6672

73+
if (ceil_mode) {
74+
nvinfer1::DimsHW pre_pad(0, 0);
75+
nvinfer1::DimsHW post_pad(0, 0);
76+
int input_height = input_shape.d[nbDims - 2];
77+
int input_width = input_shape.d[nbDims - 1];
78+
int floor_h_output_size =
79+
(input_height - ksize[0] + 2 * paddings[0]) / strides[0] + 1;
80+
int ceil_h_output_size =
81+
(input_height - ksize[0] + 2 * paddings[0] + strides[0] - 1) /
82+
strides[0] +
83+
1;
84+
85+
int floor_w_output_size =
86+
(input_width - ksize[1] + 2 * paddings[1]) / strides[1] + 1;
87+
int ceil_w_output_size =
88+
(input_width - ksize[1] + 2 * paddings[1] + strides[1] - 1) /
89+
strides[1] +
90+
1;
91+
if (floor_h_output_size != ceil_h_output_size) {
92+
post_pad.h() = strides[0] - 1;
93+
}
94+
95+
if (floor_w_output_size != ceil_w_output_size) {
96+
post_pad.w() = strides[1] - 1;
97+
}
98+
auto* layer = TRT_ENGINE_ADD_LAYER(
99+
engine_, Padding, *const_cast<nvinfer1::ITensor*>(input1), pre_pad,
100+
post_pad);
101+
input1 = layer->getOutput(0);
102+
}
67103
auto* layer = TRT_ENGINE_ADD_LAYER(engine_, Pooling,
68104
*const_cast<nvinfer1::ITensor*>(input1),
69105
nv_pool_type, nv_ksize);

paddle/fluid/inference/tensorrt/convert/test_pool2d_op.cc

Lines changed: 10 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -20,26 +20,28 @@ namespace paddle {
2020
namespace inference {
2121
namespace tensorrt {
2222

23-
void test_pool2d(bool global_pooling) {
23+
void test_pool2d(bool global_pooling, bool ceil_mode) {
2424
framework::Scope scope;
2525
std::unordered_set<std::string> parameters;
2626
TRTConvertValidation validator(5, parameters, scope, 1 << 15);
2727

2828
// The ITensor's Dims should not contain the batch size.
2929
// So, the ITensor's Dims of input and output should be C * H * W.
30-
validator.DeclInputVar("pool2d-X", nvinfer1::Dims3(3, 4, 4));
30+
validator.DeclInputVar("pool2d-X", nvinfer1::Dims3(3, 13, 14));
3131
if (global_pooling)
3232
validator.DeclOutputVar("pool2d-Out", nvinfer1::Dims3(3, 1, 1));
33+
else if (ceil_mode)
34+
validator.DeclOutputVar("pool2d-Out", nvinfer1::Dims3(3, 6, 7));
3335
else
34-
validator.DeclOutputVar("pool2d-Out", nvinfer1::Dims3(3, 2, 2));
36+
validator.DeclOutputVar("pool2d-Out", nvinfer1::Dims3(3, 6, 6));
3537

3638
// Prepare Op description
3739
framework::OpDesc desc;
3840
desc.SetType("pool2d");
3941
desc.SetInput("X", {"pool2d-X"});
4042
desc.SetOutput("Out", {"pool2d-Out"});
4143

42-
std::vector<int> ksize({2, 2});
44+
std::vector<int> ksize({3, 3});
4345
std::vector<int> strides({2, 2});
4446
std::vector<int> paddings({0, 0});
4547
std::string pooling_t = "max";
@@ -49,6 +51,7 @@ void test_pool2d(bool global_pooling) {
4951
desc.SetAttr("strides", strides);
5052
desc.SetAttr("paddings", paddings);
5153
desc.SetAttr("global_pooling", global_pooling);
54+
desc.SetAttr("ceil_mode", ceil_mode);
5255

5356
LOG(INFO) << "set OP";
5457
validator.SetOp(*desc.Proto());
@@ -57,9 +60,10 @@ void test_pool2d(bool global_pooling) {
5760
validator.Execute(3);
5861
}
5962

60-
TEST(Pool2dOpConverter, normal) { test_pool2d(false); }
63+
TEST(Pool2dOpConverter, normal) { test_pool2d(false, false); }
64+
TEST(Pool2dOpConverter, test_global_pooling) { test_pool2d(true, false); }
6165

62-
TEST(Pool2dOpConverter, test_global_pooling) { test_pool2d(true); }
66+
TEST(Pool2dOpConverter, test_ceil_mode) { test_pool2d(false, true); }
6367

6468
} // namespace tensorrt
6569
} // namespace inference

paddle/fluid/operators/activation_op.cc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@ using paddle::framework::Tensor;
2828
public: \
2929
void Make() override { \
3030
AddInput("X", "Input of " #OP_NAME " operator"); \
31-
AddOutput("Out", "Output of " #OP_NAME " operator").Reuse("X"); \
31+
AddOutput("Out", "Output of " #OP_NAME " operator"); \
3232
AddAttr<bool>("use_mkldnn", \
3333
"(bool, default false) Only used in mkldnn kernel") \
3434
.SetDefault(false); \

0 commit comments

Comments (0)