
Commit 6720681

kexinzhao authored and Xreki committed
Enable is_test attr of batch norm and drop out op for test program (#8642)
* fix is_test issue
* add paddle enforce
* fix bug
* add new func
* small fix
* address comments
1 parent f45a82b commit 6720681

File tree

5 files changed: +34 -19 lines changed

* paddle/fluid/framework/prune.cc
* python/paddle/fluid/framework.py
* python/paddle/fluid/tests/book/test_image_classification.py
* python/paddle/fluid/tests/book/test_recognize_digits.py
* python/paddle/fluid/tests/book/test_recommender_system.py

paddle/fluid/framework/prune.cc

Lines changed: 12 additions & 14 deletions
@@ -27,8 +27,6 @@ namespace framework {
 
 const std::string kFeedOpType = "feed";
 const std::string kFetchOpType = "fetch";
-const std::string kDropOutOpType = "dropout";
-const std::string kBatchNormOpType = "batch_norm";
 
 bool HasDependentVar(const proto::OpDesc& op_desc,
                      const std::set<std::string>& dependent_vars) {
@@ -186,26 +184,26 @@ void Prune(const proto::ProgramDesc& input, proto::ProgramDesc* output) {
   prune_impl(input, output, 0, -1, dependent_vars);
 }
 
-void inference_optimize_impl(const proto::ProgramDesc& input,
-                             proto::ProgramDesc* output, int block_id) {
-  *output = input;
-  auto* op_field = output->mutable_blocks(block_id)->mutable_ops();
+void inference_optimize_impl(proto::ProgramDesc* input, int block_id) {
+  auto* op_field = input->mutable_blocks(block_id)->mutable_ops();
   for (auto& op_desc : *op_field) {
-    if (op_desc.type() == kDropOutOpType ||
-        op_desc.type() == kBatchNormOpType) {
-      for (auto& attr : *op_desc.mutable_attrs()) {
-        if (attr.name() == "is_test") {
-          attr.set_b(true);
-          break;
-        }
+    for (auto& attr : *op_desc.mutable_attrs()) {
+      if (attr.name() == "is_test") {
+        attr.set_b(true);
+        break;
       }
     }
   }
 }
 
 void InferenceOptimize(const proto::ProgramDesc& input,
                        proto::ProgramDesc* output) {
-  inference_optimize_impl(input, output, 0);
+  *output = input;
+  int num_blocks = output->blocks_size();
+  PADDLE_ENFORCE_GT(num_blocks, 0, "ProgramDesc must have at least one block");
+  for (int i = 0; i < num_blocks; ++i) {
+    inference_optimize_impl(output, i);
+  }
 }
 
 }  // namespace framework
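
The net effect of this C++ change is easier to see outside protobuf-mutation code. Below is a minimal Python sketch of the new InferenceOptimize behavior, using toy dicts in place of the real ProgramDesc API (an assumption for illustration only): copy the program, then set every `is_test` attribute to True in every block, instead of special-casing dropout and batch_norm in block 0.

```python
# Sketch of the new InferenceOptimize behavior; plain dicts stand in for
# the protobuf ProgramDesc (assumed toy structure, not PaddlePaddle API).
import copy

def inference_optimize(program):
    # Mirrors PADDLE_ENFORCE_GT(num_blocks, 0, ...)
    assert len(program["blocks"]) > 0, "ProgramDesc must have at least one block"
    output = copy.deepcopy(program)        # mirrors `*output = input;`
    for block in output["blocks"]:         # new: walk all blocks, not just block 0
        for op in block["ops"]:
            if "is_test" in op["attrs"]:   # new: any op carrying the attr,
                op["attrs"]["is_test"] = True  # not just dropout/batch_norm
    return output

prog = {"blocks": [{"ops": [{"type": "batch_norm", "attrs": {"is_test": False}},
                            {"type": "mul", "attrs": {}}]}]}
print(inference_optimize(prog)["blocks"][0]["ops"][0]["attrs"])  # {'is_test': True}
```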

python/paddle/fluid/framework.py

Lines changed: 19 additions & 2 deletions
@@ -956,9 +956,26 @@ def to_string(self, throw_on_error, with_details=False):
     def get_desc(self):
         return self.desc
 
-    def clone(self):
+    def clone(self, for_test=False):
+        """Clone the Program object
+
+        Set for_test to False when we want to clone the program for training.
+        Set for_test to True when we want to clone the program for testing.
+
+        Args:
+            for_test(bool): Some operators, such as batch_norm and drop_out ops,
+                behave differently in training and testing. If for_test is True,
+                the is_test attributes in these operators will be set to True for
+                testing purposes, otherwise, they remain unchanged.
+
+        Returns(Program):
+            The cloned Program object.
+        """
         p = Program()
-        p.desc = core.ProgramDesc(self.desc)
+        if for_test:
+            p.desc = core.inference_optimize(self.desc)
+        else:
+            p.desc = core.ProgramDesc(self.desc)
         p.blocks = [Block(p, i) for i in xrange(self.desc.num_blocks())]
         p.sync_with_cpp()
         p.copy_param_info_from(self)
python/paddle/fluid/tests/book/test_image_classification.py

Lines changed: 1 addition & 1 deletion
@@ -115,7 +115,7 @@ def train(net_type, use_cuda, save_dirname, is_local):
     acc = fluid.layers.accuracy(input=predict, label=label)
 
     # Test program
-    test_program = fluid.default_main_program().clone()
+    test_program = fluid.default_main_program().clone(for_test=True)
 
     optimizer = fluid.optimizer.Adam(learning_rate=0.001)
     optimize_ops, params_grads = optimizer.minimize(avg_cost)

python/paddle/fluid/tests/book/test_recognize_digits.py

Lines changed: 1 addition & 1 deletion
@@ -92,7 +92,7 @@ def train(nn_type,
     else:
         prediction, avg_loss, acc = net_conf(img, label)
 
-    test_program = fluid.default_main_program().clone()
+    test_program = fluid.default_main_program().clone(for_test=True)
 
     optimizer = fluid.optimizer.Adam(learning_rate=0.001)
     optimize_ops, params_grads = optimizer.minimize(avg_loss)

python/paddle/fluid/tests/book/test_recommender_system.py

Lines changed: 1 addition & 1 deletion
@@ -157,7 +157,7 @@ def train(use_cuda, save_dirname, is_local=True):
     scale_infer, avg_cost = model()
 
     # test program
-    test_program = fluid.default_main_program().clone()
+    test_program = fluid.default_main_program().clone(for_test=True)
 
     sgd_optimizer = SGDOptimizer(learning_rate=0.2)
     optimize_ops, params_grads = sgd_optimizer.minimize(avg_cost)
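
All three book tests make the same one-line change: clone with for_test=True before the optimizer is attached. Evaluation then just runs the clone through the executor. A hedged continuation of the sketch after the framework.py diff above; `test_reader` and the other names are illustrative, not from this commit.

```python
exe = fluid.Executor(fluid.CPUPlace())
exe.run(fluid.default_startup_program())

# Assumes img/label/avg_cost/test_program from the sketch above, plus a
# batched reader set up in the usual fluid way (illustrative).
feeder = fluid.DataFeeder(feed_list=[img, label], place=fluid.CPUPlace())
for data in test_reader():
    # batch_norm uses its accumulated statistics and dropout is a
    # pass-through here, because the clone was made with for_test=True.
    loss_val, = exe.run(program=test_program,
                        feed=feeder.feed(data),
                        fetch_list=[avg_cost])
```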
