Skip to content

Commit cd69e1d

Browse files
Authored commit: Merge pull request #13339 from chengduoZH/debug_while_op
Enable nested WhileOp
2 parents 552cdc1 + cc18fff commit cd69e1d

File tree

2 files changed

+25
-5
lines changed

2 files changed

+25
-5
lines changed

paddle/fluid/operators/while_op.cc

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,7 @@ class WhileOp : public framework::OperatorBase {
6363
while (cond.data<bool>()[0]) {
6464
auto &current_scope = scope.NewScope();
6565
step_scopes->push_back(&current_scope);
66-
executor.RunPreparedContext(ctx.get(), &current_scope, false);
66+
executor.RunPreparedContext(ctx.get(), &current_scope, false, true, true);
6767
if (is_test) {
6868
scope.DeleteScope(&current_scope);
6969
}
@@ -169,7 +169,8 @@ class WhileGradOp : public framework::OperatorBase {
169169
}
170170
}
171171
}
172-
executor.RunPreparedContext(ctx.get(), *cur_scope_iter, false);
172+
executor.RunPreparedContext(ctx.get(), *cur_scope_iter, false, true,
173+
true);
173174

174175
auto &pg_names = Outputs(kXGRAD);
175176
auto &p_names = Inputs(kX);

python/paddle/fluid/tests/unittests/test_while_op.py

Lines changed: 22 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
1+
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
22
#
33
# Licensed under the Apache License, Version 2.0 (the "License");
44
# you may not use this file except in compliance with the License.
@@ -30,8 +30,10 @@ def test_simple_forward(self):
3030
"d1", shape=[10], append_batch_size=False, dtype='float32')
3131
d2 = layers.data(
3232
"d2", shape=[10], append_batch_size=False, dtype='float32')
33+
3334
i = layers.zeros(shape=[1], dtype='int64')
3435
i.stop_gradient = True
36+
3537
init = layers.zeros(shape=[10], dtype='float32')
3638
mem_array = layers.array_write(x=init, i=i)
3739
data_array = layers.array_write(x=d0, i=i)
@@ -45,11 +47,19 @@ def test_simple_forward(self):
4547
i = layers.zeros(shape=[1], dtype='int64')
4648
i.stop_gradient = True
4749

48-
array_len = layers.fill_constant(shape=[1], dtype='int64', value=3)
50+
array_len = layers.fill_constant(shape=[1], dtype='int64', value=1)
4951
array_len.stop_gradient = True
5052
cond = layers.less_than(x=i, y=array_len)
5153

54+
j = layers.fill_constant(shape=[1], dtype='int64', value=1)
55+
j.stop_gradient = True
56+
57+
array_len2 = layers.fill_constant(shape=[1], dtype='int64', value=3)
58+
array_len2.stop_gradient = True
59+
cond2 = layers.less_than(x=j, y=array_len2)
60+
5261
while_op = layers.While(cond=cond)
62+
while_op2 = layers.While(cond=cond2)
5363
with while_op.block():
5464
d = layers.array_read(array=data_array, i=i)
5565
prev = layers.array_read(array=mem_array, i=i)
@@ -59,7 +69,16 @@ def test_simple_forward(self):
5969
layers.array_write(result, i=i, array=mem_array)
6070
layers.less_than(x=i, y=array_len, cond=cond)
6171

62-
sum_result = layers.array_read(array=mem_array, i=i)
72+
with while_op2.block():
73+
d2 = layers.array_read(array=data_array, i=j)
74+
prev2 = layers.array_read(array=mem_array, i=j)
75+
result2 = layers.sums(input=[d2, prev2])
76+
77+
j = layers.increment(x=j, in_place=True)
78+
layers.array_write(result2, i=j, array=mem_array)
79+
layers.less_than(x=j, y=array_len2, cond=cond2)
80+
81+
sum_result = layers.array_read(array=mem_array, i=j)
6382
loss = layers.mean(sum_result)
6483

6584
append_backward(loss)

0 commit comments

Comments (0)