
Commit b294353

Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into map_api
2 parents 80ee069 + 3cab25a commit b294353


71 files changed: +1059 / -824 lines changed

paddle/fluid/framework/framework.proto

Lines changed: 0 additions & 1 deletion
@@ -80,7 +80,6 @@ message OpProto {
     optional bool duplicable = 3 [ default = false ];
     optional bool intermediate = 4 [ default = false ];
     optional bool dispensable = 5 [ default = false ];
-    optional string reuse = 6;
   }

   // AttrProto describes the C++ type Attribute.

paddle/fluid/framework/ir/CMakeLists.txt

Lines changed: 1 addition & 3 deletions
@@ -42,12 +42,10 @@ if(WITH_MKLDNN)
   pass_library(mkldnn_placement_pass base)
   pass_library(conv_bias_mkldnn_fuse_pass inference)
   pass_library(conv_relu_mkldnn_fuse_pass inference)
+  pass_library(conv_elementwise_add_mkldnn_fuse_pass inference)
 endif()

 cc_library(fuse_elewise_add_act_pass SRCS fuse_elewise_add_act_pass.cc DEPS pass graph_pattern_detector )
-if(WITH_MKLDNN)
-  pass_library(conv_elementwise_add_mkldnn_fuse_pass inference)
-endif()

 set(GLOB_PASS_LIB ${PASS_LIBRARY} CACHE INTERNAL "Global PASS library")

paddle/fluid/framework/ir/graph_helper_test.cc

Lines changed: 3 additions & 3 deletions
@@ -200,15 +200,15 @@ TEST(GraphHelperTest, GraphNum) {

   Graph g(prog);
   BuildZeroGraph(&g);
-  ASSERT_EQ(GraphNum(g), 0);
+  ASSERT_EQ(GraphNum(g), 0UL);

   Graph g2(prog);
   BuildOneGraph(&g2);
-  ASSERT_EQ(GraphNum(g2), 1);
+  ASSERT_EQ(GraphNum(g2), 1UL);

   Graph g3(prog);
   BuildTwoGraphs(&g3);
-  ASSERT_EQ(GraphNum(g3), 2);
+  ASSERT_EQ(GraphNum(g3), 2UL);
 }

 }  // namespace ir

paddle/fluid/framework/ir/graph_test.cc

Lines changed: 1 addition & 1 deletion
@@ -124,7 +124,7 @@ TEST(GraphTest, Basic) {
       ASSERT_EQ(n->outputs.size(), 0UL);
     }
   }
-  ASSERT_EQ(nodes.size(), 5);
+  ASSERT_EQ(nodes.size(), 5UL);
 }

 TEST(GraphTest, WriteAfterRead) {

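The two test edits above (and the similar one in program_desc_test.cc below) switch the expected counts to unsigned literals, presumably so that comparing against size_t-returning calls compiles cleanly under -Wsign-compare with -Werror. A minimal standalone sketch of the pattern, using plain gtest and a std::vector rather than Paddle's Graph API:

#include <gtest/gtest.h>
#include <vector>

// Comparing a size_t (vector::size, GraphNum, ...) against a signed int
// literal can trigger a sign-compare warning; an unsigned literal avoids it.
TEST(SignCompareSketch, UnsignedLiteral) {
  std::vector<int> nodes{1, 2, 3, 4, 5};
  ASSERT_EQ(nodes.size(), 5UL);  // size() returns std::size_t
}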
paddle/fluid/framework/op_desc.cc

Lines changed: 5 additions & 11 deletions
@@ -515,20 +515,14 @@ void OpDesc::InferShape(const BlockDesc &block) const {
 }

 void OpDesc::InferVarType(BlockDesc *block) const {
+  // There are a few places that var type can be set.
+  // When VarDesc is created, default set to LOD_TENSOR.
+  // When output variable is created, default is defaut set to LOD_TENSOR.
+  // We limit here to be the only place that operator defines its customized
+  // var type inference. Hence, we don't do any "default" setting here.
   auto &info = OpInfoMap::Instance().Get(this->Type());
   if (info.infer_var_type_) {
     info.infer_var_type_(*this, block);
-  } else {
-    // all output type is LoDTensor by default
-    VLOG(10) << this->Type()
-             << " has not registered InferVarType. Set output variables to "
-                "LOD_TENSOR";
-    for (auto &out_pair : this->outputs_) {
-      for (auto &out_var_name : out_pair.second) {
-        block->FindRecursiveOrCreateVar(out_var_name)
-            .SetType(proto::VarType::LOD_TENSOR);
-      }
-    }
   }
 }

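The new comment spells out the design: variables already default to LOD_TENSOR when they are created, so InferVarType no longer needs a fallback loop and only dispatches to operators that registered their own inference. A rough standalone sketch of that dispatch shape, using hypothetical names (Registry, InferVarTypeFn, VarMap) that are not Paddle's actual API:

#include <functional>
#include <string>
#include <unordered_map>

enum class VarType { LOD_TENSOR, SELECTED_ROWS };

struct Var {
  VarType type = VarType::LOD_TENSOR;  // assumed creation-time default
};

using VarMap = std::unordered_map<std::string, Var>;
using InferVarTypeFn = std::function<void(VarMap*)>;

// Hypothetical registry keyed by operator type.
std::unordered_map<std::string, InferVarTypeFn>& Registry() {
  static std::unordered_map<std::string, InferVarTypeFn> registry;
  return registry;
}

void InferVarType(const std::string& op_type, VarMap* vars) {
  auto it = Registry().find(op_type);
  if (it != Registry().end()) {
    it->second(vars);  // only ops with a registered hook customize types
  }
  // No else branch: the creation-time default already covers everything else.
}

int main() {
  Registry()["sum"] = [](VarMap* vars) {
    (*vars)["Out"].type = VarType::SELECTED_ROWS;  // custom inference example
  };
  VarMap vars{{"Out", Var{}}, {"Y", Var{}}};
  InferVarType("sum", &vars);   // "Out" becomes SELECTED_ROWS
  InferVarType("relu", &vars);  // no hook registered: "Y" stays LOD_TENSOR
  return 0;
}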
paddle/fluid/framework/op_proto_maker.cc

Lines changed: 0 additions & 53 deletions
@@ -21,7 +21,6 @@ namespace framework {
 void OpProtoAndCheckerMaker::Validate() {
   validated_ = true;
   CheckNoDuplicatedInOutAttrs();
-  CheckReuseVars();
 }

 OpProtoAndCheckerMaker::VariableBuilder OpProtoAndCheckerMaker::AddInput(
@@ -40,40 +39,6 @@ OpProtoAndCheckerMaker::VariableBuilder OpProtoAndCheckerMaker::AddOutput(
   return OpProtoAndCheckerMaker::VariableBuilder{output};
 }

-void OpProtoAndCheckerMaker::Reuse(const std::string& name,
-                                   const std::string& reused_name) {
-  bool found = false;
-  proto::OpProto::Var* var;
-
-  for (auto& var : proto_->inputs()) {
-    if (var.name() == reused_name) {
-      found = true;
-      break;
-    }
-  }
-  PADDLE_ENFORCE(found == true,
-                 "Input/Output name: %s reused_name: %s, one of them is not "
-                 "exists or not matched.",
-                 name, reused_name);
-
-  found = false;
-  for (int i = 0; i < proto_->outputs().size(); ++i) {
-    var = proto_->mutable_outputs()->Mutable(i);
-    if (var->name() == name) {
-      PADDLE_ENFORCE(!var->has_reuse(),
-                     "Output(%s) has been set reused var of %s", name,
-                     var->reuse());
-      found = true;
-      var->set_reuse(reused_name);
-      break;
-    }
-  }
-  PADDLE_ENFORCE(found == true,
-                 "Input/Output name: %s reused_name: %s, one of them is not "
-                 "exists or not matched.",
-                 name, reused_name);
-}
-
 void OpProtoAndCheckerMaker::CheckNoDuplicatedInOutAttrs() {
   std::unordered_set<std::string> names;
   auto checker = [&](const std::string& name) {
@@ -91,24 +56,6 @@ void OpProtoAndCheckerMaker::CheckNoDuplicatedInOutAttrs() {
   }
 }

-void OpProtoAndCheckerMaker::CheckReuseVars() {
-  std::unordered_set<std::string> names;
-  for (auto& input : proto_->inputs()) {
-    names.insert(input.name());
-  }
-  auto checker = [&](const std::string& name, const std::string& reused) {
-    PADDLE_ENFORCE(
-        names.count(reused),
-        "Output [%s] reuse Input [%s], but the input is not registered.", name,
-        reused);
-  };
-  for (auto& output : proto_->outputs()) {
-    if (output.has_reuse()) {
-      checker(output.name(), output.reuse());
-    }
-  }
-}
-
 void OpProtoAndCheckerMaker::operator()(proto::OpProto* proto,
                                         OpAttrChecker* attr_checker) {
   proto_ = proto;

paddle/fluid/framework/op_proto_maker.h

Lines changed: 0 additions & 11 deletions
@@ -14,8 +14,6 @@ limitations under the License. */
 #pragma once

 #include <string>
-#include <unordered_set>
-
 #include "glog/logging.h"
 #include "paddle/fluid/framework/attribute.h"
 #include "paddle/fluid/framework/framework.pb.h"
@@ -73,20 +71,13 @@ class OpProtoAndCheckerMaker {
       var_->set_dispensable(true);
       return *this;
     }
-
-    VariableBuilder &Reuse(const std::string &name) {
-      var_->set_reuse(name);
-      return *this;
-    }
   };

   VariableBuilder AddInput(const std::string &name, const std::string &comment);

   VariableBuilder AddOutput(const std::string &name,
                             const std::string &comment);

-  void Reuse(const std::string &name, const std::string &reused_name);
-
   template <typename T>
   TypedAttrChecker<T> &AddAttr(const std::string &name,
                                const std::string &comment,
@@ -105,8 +96,6 @@
   void CheckNoDuplicatedInOutAttrs();
   void Validate();

-  void CheckReuseVars();
-
   proto::OpProto *proto_;
   OpAttrChecker *op_checker_;
   bool validated_{false};

paddle/fluid/framework/op_proto_maker_test.cc

Lines changed: 0 additions & 117 deletions
@@ -47,120 +47,3 @@ TEST(ProtoMaker, DuplicatedInOut) {
   ASSERT_THROW(proto_maker(&op_proto, &op_checker),
                paddle::platform::EnforceNotMet);
 }
-
-class TestInplaceProtoMaker : public paddle::framework::OpProtoAndCheckerMaker {
- public:
-  void Make() {
-    AddInput("X", "input of test op");
-    AddOutput("XOut", "output of test op").Reuse("X");
-  }
-};
-
-class TestInplaceProtoMaker2
-    : public paddle::framework::OpProtoAndCheckerMaker {
- public:
-  void Make() {
-    AddInput("X", "input of test op");
-    AddOutput("XOut", "output of test op").Reuse("X");
-    AddOutput("NoOut", "output of test op").Reuse("NotExists");
-  }
-};
-
-TEST(ProtoMaker, InplaceOutput) {
-  paddle::framework::proto::OpProto op_proto, op_proto2;
-  paddle::framework::OpAttrChecker op_checker;
-  TestInplaceProtoMaker proto_maker;
-  TestInplaceProtoMaker2 proto_maker2;
-
-  proto_maker(&op_proto, &op_checker);
-
-  ASSERT_THROW(proto_maker2(&op_proto2, &op_checker),
-               paddle::platform::EnforceNotMet);
-}
-
-// normal reuse
-class TestReuseProtoMaker : public paddle::framework::OpProtoAndCheckerMaker {
- public:
-  void Make() {
-    AddInput("X", "input of test op");
-    AddInput("Y", "input of test op");
-    AddOutput("Out", "output of test op");
-    AddOutput("XOut", "output of test op");
-    // avoid destructor exception.
-    // Validate();
-    TestReuse();
-  }
-
-  virtual void TestReuse() {}
-};
-
-// test duplicate reuse error
-class TestReuseProtoMaker2 : public TestReuseProtoMaker {
- public:
-  void TestReuse() {
-    Reuse("Out", "X");
-    Reuse("Out", "Y");
-  }
-};
-
-// NotExists Input
-class TestReuseProtoMaker3 : public TestReuseProtoMaker {
- public:
-  void TestReuse() {
-    Reuse("Out", "NotExists");
-    Reuse("XOut", "X");
-  }
-};
-
-// NotExists Output
-class TestReuseProtoMaker4 : public TestReuseProtoMaker {
- public:
-  void TestReuse() { Reuse("NotExists", "X"); }
-};
-
-TEST(ProtoMaker, Reuse) {
-  paddle::framework::proto::OpProto op_proto;
-  paddle::framework::OpAttrChecker op_checker;
-  TestReuseProtoMaker proto_maker;
-  proto_maker(&op_proto, &op_checker);
-}
-
-// NOTE(dzhwinter):
-// There is a Fatal CHECK on base class destructor, which will call abort inside
-// instead of
-// throw an exception. If we throw an exception in Make(), we will trigger the
-// CHECK and terminate the tests.
-//
-// I had tried to replace the default CHECK with a exception, however, it's
-// still not supported by glog.
-// the details:
-// https://github.com/google/glog/issues/249
-// https://github.com/facebookresearch/TensorComprehensions/issues/351
-/*
-TEST(ProtoMaker, ReuseWithException) {
-  paddle::framework::proto::OpProto op_proto2, op_proto3, op_proto4;
-  paddle::framework::OpAttrChecker op_checker;
-  TestReuseProtoMaker2 proto_maker2;
-  TestReuseProtoMaker3 proto_maker3;
-  TestReuseProtoMaker4 proto_maker4;
-  EXPECT_THROW(proto_maker2(&op_proto2, &op_checker),
-               paddle::platform::EnforceNotMet);
-
-  EXPECT_THROW(proto_maker3(&op_proto3, &op_checker),
-               paddle::platform::EnforceNotMet);
-
-  EXPECT_THROW(proto_maker4(&op_proto4, &op_checker),
-               paddle::platform::EnforceNotMet);
-}
-
-void FailureFunction() {
-  throw std::runtime_error("Check failed in destructor.");
-  // return 0;
-}
-
-int main(int argc, char** argv) {
-  testing::InitGoogleTest(&argc, argv);
-  google::InstallFailureFunction(&FailureFunction);
-  return RUN_ALL_TESTS();
-}
-*/

paddle/fluid/framework/parallel_executor.cc

Lines changed: 4 additions & 6 deletions
@@ -156,12 +156,10 @@ ParallelExecutor::ParallelExecutor(
         params, member_->local_scopes_, member_->use_cuda_);
 #endif

-  if (VLOG_IS_ON(5)) {
-    // If the loss_var_name is given, the number of graph should be only one.
-    if (loss_var_name.size()) {
-      PADDLE_ENFORCE_EQ(ir::GraphNum(*graph), 1,
-                        "The number of graph should be only one");
-    }
+  // If the loss_var_name is given, the number of graph should be only one.
+  if (loss_var_name.size()) {
+    PADDLE_ENFORCE_EQ(ir::GraphNum(*graph), 1,
+                      "The number of graph should be only one");
   }

   if (exec_strategy.type_ == ExecutionStrategy::kDefault) {

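The effect of this change is that the single-graph check no longer depends on the logging verbosity level. A standalone illustration (not Paddle code; a plain exception stands in for PADDLE_ENFORCE_EQ):

#include <cstddef>
#include <stdexcept>
#include <string>

// After the change: validated whenever a loss variable is given.
void EnforceSingleGraph(const std::string& loss_var_name, std::size_t graph_num) {
  if (!loss_var_name.empty() && graph_num != 1) {
    throw std::runtime_error("The number of graph should be only one");
  }
}
// Before the change, the equivalent check sat inside `if (VLOG_IS_ON(5)) { ... }`,
// so runs with low log verbosity silently skipped it.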
paddle/fluid/framework/program_desc_test.cc

Lines changed: 1 addition & 1 deletion
@@ -103,7 +103,7 @@ TEST(ProgramDesc, copy_ctor) {
       ASSERT_EQ(1, op->GetBlockAttrId("sub_block"));
       found_sub_block = true;

-      ASSERT_EQ(2, op->GetBlocksAttrIds("sub_blocks").size());
+      ASSERT_EQ(2UL, op->GetBlocksAttrIds("sub_blocks").size());
       found_sub_blocks = true;
     }
   }
