Skip to content

Commit df81915

Browse files
authored
[NPU] add randperm_op_npu (#35763) (#36026)
* add randperm_op_npu
* fix test_set_value_op_npu
1 parent 2e473f2 commit df81915

File tree

4 files changed

+151
-1
lines changed

4 files changed

+151
-1
lines changed
Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
/* Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
2+
3+
Licensed under the Apache License, Version 2.0 (the "License");
4+
you may not use this file except in compliance with the License.
5+
You may obtain a copy of the License at
6+
7+
http://www.apache.org/licenses/LICENSE-2.0
8+
9+
Unless required by applicable law or agreed to in writing, software
10+
distributed under the License is distributed on an "AS IS" BASIS,
11+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
See the License for the specific language governing permissions and
13+
limitations under the License. */
14+
15+
#include "paddle/fluid/operators/randperm_op.h"
16+
#include "paddle/fluid/framework/op_registry.h"
17+
18+
template <typename T>
19+
using kernel =
20+
paddle::operators::RandpermKernel<paddle::platform::NPUDeviceContext, T>;
21+
22+
REGISTER_OP_NPU_KERNEL(randperm, kernel<int64_t>, kernel<int>, kernel<float>,
23+
kernel<double>);

paddle/fluid/operators/utils.h

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -114,6 +114,11 @@ inline T GetValue(const framework::Tensor* x) {
114114
if (!platform::is_cpu_place(x->place())) {
115115
framework::Tensor cpu_x;
116116
framework::TensorCopy(*x, platform::CPUPlace(), &cpu_x);
117+
#ifdef PADDLE_WITH_ASCEND_CL
118+
platform::DeviceContextPool& pool = platform::DeviceContextPool::Instance();
119+
const platform::DeviceContext* dev_ctx = pool.Get(x->place());
120+
dev_ctx->Wait();
121+
#endif
117122
value = cpu_x.data<T>()[0];
118123
} else {
119124
value = x->data<T>()[0];
Lines changed: 122 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,122 @@
1+
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# http://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
14+
15+
from __future__ import print_function
16+
17+
import unittest
18+
import numpy as np
19+
import sys
20+
sys.path.append("..")
21+
from op_test import OpTest
22+
import paddle
23+
import paddle.fluid.core as core
24+
from paddle.static import program_guard, Program
25+
from test_randperm_op import check_randperm_out, error_msg, convert_dtype
26+
27+
paddle.enable_static()
28+
29+
30+
class TestRandpermOp(OpTest):
31+
""" Test randperm op."""
32+
33+
def setUp(self):
34+
self.set_npu()
35+
self.op_type = "randperm"
36+
self.n = 200
37+
self.dtype = "int64"
38+
39+
self.inputs = {}
40+
self.outputs = {"Out": np.zeros((self.n)).astype(self.dtype)}
41+
self.init_attrs()
42+
self.attrs = {
43+
"n": self.n,
44+
"dtype": convert_dtype(self.dtype),
45+
}
46+
47+
def set_npu(self):
48+
self.__class__.use_npu = True
49+
50+
def _get_places(self):
51+
return [paddle.NPUPlace(0)]
52+
53+
def init_attrs(self):
54+
pass
55+
56+
def test_check_output(self):
57+
self.check_output_customized(self.verify_output)
58+
59+
def verify_output(self, outs):
60+
out_np = np.array(outs[0])
61+
self.assertTrue(
62+
check_randperm_out(self.n, out_np), msg=error_msg(out_np))
63+
64+
65+
class TestRandpermOpN(TestRandpermOp):
66+
def init_attrs(self):
67+
self.n = 10000
68+
69+
70+
class TestRandpermOpInt32(TestRandpermOp):
71+
def init_attrs(self):
72+
self.dtype = "int32"
73+
74+
75+
class TestRandpermOpFloat32(TestRandpermOp):
76+
def init_attrs(self):
77+
self.dtype = "float32"
78+
79+
80+
class TestRandpermOpFloat64(TestRandpermOp):
81+
def init_attrs(self):
82+
self.dtype = "float64"
83+
84+
85+
class TestRandpermOpError(unittest.TestCase):
86+
def test_errors(self):
87+
with program_guard(Program(), Program()):
88+
self.assertRaises(ValueError, paddle.randperm, -3)
89+
self.assertRaises(TypeError, paddle.randperm, 10, 'int8')
90+
91+
92+
class TestRandpermAPI(unittest.TestCase):
93+
def test_out(self):
94+
n = 10
95+
place = paddle.NPUPlace(0)
96+
with program_guard(Program(), Program()):
97+
x1 = paddle.randperm(n)
98+
x2 = paddle.randperm(n, 'float32')
99+
100+
exe = paddle.static.Executor(place)
101+
res = exe.run(fetch_list=[x1, x2])
102+
103+
self.assertEqual(res[0].dtype, np.int64)
104+
self.assertEqual(res[1].dtype, np.float32)
105+
self.assertTrue(check_randperm_out(n, res[0]))
106+
self.assertTrue(check_randperm_out(n, res[1]))
107+
108+
109+
class TestRandpermImperative(unittest.TestCase):
110+
def test_out(self):
111+
paddle.disable_static(paddle.NPUPlace(0))
112+
n = 10
113+
for dtype in ['int32', np.int64, 'float32', 'float64']:
114+
data_p = paddle.randperm(n, dtype)
115+
data_np = data_p.numpy()
116+
self.assertTrue(
117+
check_randperm_out(n, data_np), msg=error_msg(data_np))
118+
paddle.enable_static()
119+
120+
121+
if __name__ == "__main__":
122+
unittest.main()

python/paddle/fluid/tests/unittests/npu/test_set_value_op_npu.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,7 @@ def _run_static(self):
6868
return out
6969

7070
def _run_dynamic(self):
71-
paddle.disable_static()
71+
paddle.disable_static(paddle.NPUPlace(0))
7272
x = paddle.ones(shape=self.shape, dtype=self.dtype)
7373
self._call_setitem(x)
7474
out = x.numpy()

0 commit comments

Comments
 (0)