Skip to content

Commit 91fb015

Browse files
authored
Memory/reshape op (#12414)
* "remove inplace in single op" * "fix ci" * "add transpiler case" * fix conflict * "fix reshape" * "delete reshape inplace attr" * "follow the comments" * "rerun ci"
1 parent 9751345 commit 91fb015

File tree

4 files changed

+36
-34
lines changed

4 files changed

+36
-34
lines changed

paddle/fluid/operators/reshape_op.cc

Lines changed: 5 additions & 26 deletions
Original file line number | Diff line number | Diff line change
@@ -127,12 +127,6 @@ class ReshapeOpMaker : public framework::OpProtoAndCheckerMaker {
127127
AddOutput("Out", "(Tensor). The output tensor of reshape operator.");
128128
AddAttr<std::vector<int>>(
129129
"shape", "(std::vector<int>) Target shape of reshape operator.");
130-
AddAttr<bool>("inplace",
131-
"(default: false) Change the source tensor's shape without "
132-
"memory copy. When Attr(inplace) is set true, the output "
133-
"tensor shares memory with Input(X), otherwise, a new output "
134-
"tensor is created, and its data are copied from Input(x).")
135-
.SetDefault(false);
136130
AddComment(R"DOC(
137131
Reshape Operator.
138132
@@ -233,16 +227,9 @@ class ReshapeKernel {
233227
"sequence_reshape op.");
234228
}
235229

236-
bool inplace = ctx.Attr<bool>("inplace");
230+
out->mutable_data(ctx.GetPlace(), in->type());
231+
framework::TensorCopySync(*in, ctx.GetPlace(), out);
237232
out->Resize(out_dims);
238-
if (!inplace) {
239-
out->mutable_data(ctx.GetPlace(), in->type());
240-
framework::TensorCopySync(*in, ctx.GetPlace(), out);
241-
out->Resize(out_dims);
242-
} else {
243-
out->ShareDataWith(*in);
244-
out->Resize(out_dims);
245-
}
246233
}
247234
};
248235

@@ -251,19 +238,11 @@ class ReshapeGradKernel {
251238
void operator()(const framework::ExecutionContext &ctx) const {
252239
auto *d_out = ctx.Input<framework::Tensor>(framework::GradVarName("Out"));
253240
auto *d_x = ctx.Output<framework::Tensor>(framework::GradVarName("X"));
241+
auto in_dims = d_x->dims();
254242

255243
d_x->mutable_data(ctx.GetPlace(), d_out->type());
256-
bool inplace = ctx.Attr<bool>("inplace");
257-
258-
auto in_dims = d_x->dims();
259-
if (!inplace) {
260-
framework::TensorCopy(*d_out, ctx.GetPlace(), ctx.device_context(), d_x);
261-
ctx.device_context().Wait();
262-
d_x->Resize(in_dims);
263-
} else {
264-
d_x->ShareDataWith(*d_out);
265-
d_x->Resize(in_dims);
266-
}
244+
framework::TensorCopySync(*d_out, ctx.GetPlace(), d_x);
245+
d_x->Resize(in_dims);
267246
}
268247
};
269248

python/paddle/fluid/layers/nn.py

Lines changed: 4 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -4473,15 +4473,14 @@ def reshape(x, shape, actual_shape=None, act=None, inplace=True, name=None):
44734473
"except one unknown dimension.")
44744474

44754475
helper = LayerHelper("reshape", **locals())
4476-
reshaped = helper.create_tmp_variable(dtype=x.dtype)
4476+
out = helper.create_tmp_variable(dtype=x.dtype)
44774477
helper.append_op(
44784478
type="reshape",
44794479
inputs=inputs,
4480-
attrs={"shape": shape,
4481-
"inplace": inplace},
4482-
outputs={"Out": reshaped})
4480+
attrs={"shape": shape},
4481+
outputs={"Out": out})
44834482

4484-
return helper.append_activation(reshaped)
4483+
return helper.append_activation(out)
44854484

44864485

44874486
def lod_reset(x, y=None, target_lod=None):

python/paddle/fluid/tests/unittests/test_memory_optimization_transpiler.py

Lines changed: 24 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -43,5 +43,29 @@ def test_control_flow_graph(self):
4343
print(str(result_program))
4444

4545

46+
class TestMemoryTranspiler2(unittest.TestCase):
47+
def setUp(self):
48+
program = Program()
49+
with program_guard(program, startup_program=Program()):
50+
x = layers.data(name='x', shape=[13], dtype='float32')
51+
fc = layers.fc(input=x, size=10, act=None)
52+
reshape = layers.reshape(x=fc, shape=[-1, 2, 5])
53+
fc = layers.reshape(x=reshape, shape=[-1, 5, 2])
54+
y_predict = layers.fc(input=fc, size=1, act=None)
55+
y = layers.data(name='y', shape=[1], dtype='float32')
56+
cost = layers.square_error_cost(input=y_predict, label=y)
57+
avg_cost = layers.mean(cost)
58+
opt = optimizer.SGD(learning_rate=0.001)
59+
opt.minimize(avg_cost)
60+
self.program = program
61+
62+
def test_inplace_ops(self):
63+
print("before optimization")
64+
print(str(self.program))
65+
result_program = memory_optimize(self.program)
66+
print("after optimization")
67+
print(str(result_program))
68+
69+
4670
if __name__ == "__main__":
4771
unittest.main()

python/paddle/fluid/tests/unittests/test_reshape_op.py

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -25,7 +25,7 @@ def setUp(self):
2525

2626
self.op_type = "reshape"
2727
self.inputs = {"X": np.random.random(ori_shape).astype("float32")}
28-
self.attrs = {"shape": new_shape, "inplace": False}
28+
self.attrs = {"shape": new_shape}
2929
self.outputs = {"Out": self.inputs["X"].reshape(new_shape)}
3030

3131
def test_check_output(self):
@@ -42,7 +42,7 @@ def setUp(self):
4242

4343
self.op_type = "reshape"
4444
self.inputs = {"X": np.random.random(ori_shape).astype("float32")}
45-
self.attrs = {"shape": new_shape, "inplace": False}
45+
self.attrs = {"shape": new_shape}
4646
self.outputs = {"Out": self.inputs["X"].reshape(self.attrs["shape"])}
4747

4848
def test_check_output(self):
@@ -60,7 +60,7 @@ def setUp(self):
6060

6161
self.op_type = "reshape"
6262
self.inputs = {"X": np.random.random(ori_shape).astype("float32")}
63-
self.attrs = {"shape": new_shape, "inplace": False}
63+
self.attrs = {"shape": new_shape}
6464
self.outputs = {"Out": self.inputs["X"].reshape(infered_shape)}
6565

6666
def test_check_output(self):

0 commit comments

Comments (0)