Commit 41a3387

zrr1999 and YqGe585 authored
Co-authored-by: Yuqiang Ge <[email protected]>
1 parent e799b7f commit 41a3387

File tree

1 file changed: +84 -84 lines changed


backends/npu/tests/unittests/test_activation_op.py

Lines changed: 84 additions & 84 deletions
@@ -159,90 +159,90 @@ def ref_leaky_relu(x, alpha=0.01):
     return out


-# class TestLeakyRelu(TestActivation):
-#     def get_alpha(self):
-#         return 0.02
-
-#     def setUp(self):
-#         self.set_npu()
-#         self.op_type = "leaky_relu"
-#         self.init_dtype()
-#         self.init_shape()
-#         alpha = self.get_alpha()
-
-#         np.random.seed(1024)
-#         x = np.random.uniform(-1, 1, self.shape).astype(self.dtype)
-#         # The same reason with TestAbs
-#         x[np.abs(x) < 0.005] = 0.05
-#         out = ref_leaky_relu(x, alpha)
-
-#         self.inputs = {"X": x}
-#         self.outputs = {"Out": out}
-#         self.attrs = {"alpha": alpha}
-
-#     def test_check_grad(self):
-#         if self.dtype == np.float16:
-#             return
-#         self.check_grad_with_place(self.place, ["X"], "Out")
-
-
-# class TestLeakyReluAlpha1(TestLeakyRelu):
-#     def get_alpha(self):
-#         return 2
-
-
-# class TestLeakyReluAlpha2(TestLeakyRelu):
-#     def get_alpha(self):
-#         return -0.01
-
-
-# class TestLeakyReluAlpha3(TestLeakyRelu):
-#     def get_alpha(self):
-#         return -2.0
-
-
-# class TestLeakyRelu_ZeroDim(TestLeakyRelu):
-#     def init_shape(self):
-#         self.shape = []
-
-
-# class TestLeakyReluAPI(unittest.TestCase):
-#     # test paddle.nn.LeakyReLU, paddle.nn.functional.leaky_relu,
-#     def setUp(self):
-#         np.random.seed(1024)
-#         self.x_np = np.random.uniform(-1, 1, [10, 12]).astype("float32")
-#         self.place = paddle.CustomPlace("npu", 0)
-
-#     def test_static_api(self):
-#         paddle.enable_static()
-#         with paddle.static.program_guard(paddle.static.Program()):
-#             x = paddle.static.data("X", [10, 12])
-#             out1 = F.leaky_relu(x)
-#             m = paddle.nn.LeakyReLU()
-#             out2 = m(x)
-#             exe = paddle.static.Executor(self.place)
-#             res = exe.run(feed={"X": self.x_np}, fetch_list=[out1, out2])
-#         out_ref = ref_leaky_relu(self.x_np)
-#         for r in res:
-#             np.testing.assert_allclose(out_ref, r, rtol=1e-05)
-
-#     def test_dygraph_api(self):
-#         paddle.disable_static(self.place)
-#         x = paddle.to_tensor(self.x_np)
-#         out1 = F.leaky_relu(x)
-#         m = paddle.nn.LeakyReLU()
-#         out2 = m(x)
-#         out_ref = ref_leaky_relu(self.x_np)
-#         for r in [out1, out2]:
-#             np.testing.assert_allclose(out_ref, r.numpy(), rtol=1e-05)
-
-#         out1 = F.leaky_relu(x, 0.6)
-#         m = paddle.nn.LeakyReLU(0.6)
-#         out2 = m(x)
-#         out_ref = ref_leaky_relu(self.x_np, 0.6)
-#         for r in [out1, out2]:
-#             np.testing.assert_allclose(out_ref, r.numpy(), rtol=1e-05)
-#         paddle.enable_static()
+class TestLeakyRelu(TestActivation):
+    def get_alpha(self):
+        return 0.02
+
+    def setUp(self):
+        self.set_npu()
+        self.op_type = "leaky_relu"
+        self.init_dtype()
+        self.init_shape()
+        alpha = self.get_alpha()
+
+        np.random.seed(1024)
+        x = np.random.uniform(-1, 1, self.shape).astype(self.dtype)
+        # The same reason with TestAbs
+        x[np.abs(x) < 0.005] = 0.05
+        out = ref_leaky_relu(x, alpha)
+
+        self.inputs = {"X": x}
+        self.outputs = {"Out": out}
+        self.attrs = {"alpha": alpha}
+
+    def test_check_grad(self):
+        if self.dtype == np.float16:
+            return
+        self.check_grad_with_place(self.place, ["X"], "Out")
+
+
+class TestLeakyReluAlpha1(TestLeakyRelu):
+    def get_alpha(self):
+        return 2
+
+
+class TestLeakyReluAlpha2(TestLeakyRelu):
+    def get_alpha(self):
+        return -0.01
+
+
+class TestLeakyReluAlpha3(TestLeakyRelu):
+    def get_alpha(self):
+        return -2.0
+
+
+class TestLeakyRelu_ZeroDim(TestLeakyRelu):
+    def init_shape(self):
+        self.shape = []
+
+
+class TestLeakyReluAPI(unittest.TestCase):
+    # test paddle.nn.LeakyReLU, paddle.nn.functional.leaky_relu,
+    def setUp(self):
+        np.random.seed(1024)
+        self.x_np = np.random.uniform(-1, 1, [10, 12]).astype("float32")
+        self.place = paddle.CustomPlace("npu", 0)
+
+    def test_static_api(self):
+        paddle.enable_static()
+        with paddle.static.program_guard(paddle.static.Program()):
+            x = paddle.static.data("X", [10, 12])
+            out1 = F.leaky_relu(x)
+            m = paddle.nn.LeakyReLU()
+            out2 = m(x)
+            exe = paddle.static.Executor(self.place)
+            res = exe.run(feed={"X": self.x_np}, fetch_list=[out1, out2])
+        out_ref = ref_leaky_relu(self.x_np)
+        for r in res:
+            np.testing.assert_allclose(out_ref, r, rtol=1e-05)
+
+    def test_dygraph_api(self):
+        paddle.disable_static(self.place)
+        x = paddle.to_tensor(self.x_np)
+        out1 = F.leaky_relu(x)
+        m = paddle.nn.LeakyReLU()
+        out2 = m(x)
+        out_ref = ref_leaky_relu(self.x_np)
+        for r in [out1, out2]:
+            np.testing.assert_allclose(out_ref, r.numpy(), rtol=1e-05)
+
+        out1 = F.leaky_relu(x, 0.6)
+        m = paddle.nn.LeakyReLU(0.6)
+        out2 = m(x)
+        out_ref = ref_leaky_relu(self.x_np, 0.6)
+        for r in [out1, out2]:
+            np.testing.assert_allclose(out_ref, r.numpy(), rtol=1e-05)
+        paddle.enable_static()


 def gelu(x, approximate):
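
The re-enabled tests compare NPU kernel output against the NumPy reference ref_leaky_relu(x, alpha=0.01), whose signature appears in the hunk header but whose body sits outside this diff. A minimal sketch of such a reference, assuming the standard LeakyReLU definition (out = x for x >= 0, alpha * x otherwise); the actual helper earlier in test_activation_op.py may be written differently:

import numpy as np

def ref_leaky_relu(x, alpha=0.01):
    # Elementwise LeakyReLU: keep non-negative values, scale negatives by alpha.
    # Sketch only; the real reference is defined earlier in test_activation_op.py.
    out = np.where(x >= 0, x, alpha * x).astype(x.dtype)
    return out

Assuming the suite is collected with pytest, the uncommented cases could be run in isolation with something like: python -m pytest backends/npu/tests/unittests/test_activation_op.py -k LeakyRelu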

0 commit comments
