Commit d81454e

Enable cupyx tests
1 parent 87074fa · commit d81454e

File tree

3 files changed: +209 additions, -0 deletions

dpnp/tests/helper.py

Lines changed: 9 additions & 0 deletions
@@ -1,3 +1,4 @@
+import importlib.util
 from sys import platform

 import dpctl
@@ -488,6 +489,14 @@ def is_ptl(device=None):
     return _get_dev_mask(device) in (0xB000, 0xFD00)


+def is_scipy_available():
+    """
+    Return True if SciPy is installed and can be found,
+    False otherwise.
+    """
+    return importlib.util.find_spec("scipy") is not None
+
+
 def is_tgllp_iris_xe(device=None):
     """
     Return True if a test is running on Tiger Lake-LP with Iris Xe GPU device,
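
A helper like this is typically consumed as a pytest guard around SciPy-dependent tests. A minimal usage sketch (the test name below is hypothetical, not part of the commit):

import pytest

from dpnp.tests.helper import is_scipy_available

@pytest.mark.skipif(not is_scipy_available(), reason="SciPy is required")
def test_example_requiring_scipy():
    # hypothetical test body; only runs when SciPy can be imported
    import scipy.linalg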

dpnp/tests/third_party/cupyx/scipy_tests/linalg_tests/__init__.py

Whitespace-only changes.

Lines changed: 200 additions & 0 deletions
@@ -0,0 +1,200 @@
+from __future__ import annotations
+
+import unittest
+import warnings
+
+import numpy
+import pytest
+
+import dpnp as cupy
+from dpnp.tests.third_party.cupy import testing
+
+if cupy.tests.helper.is_scipy_available():
+    import scipy.linalg
+
+
+# TODO: After the feature is released
+# requires_scipy_linalg_backend = testing.with_requires('scipy>=1.x.x')
+requires_scipy_linalg_backend = unittest.skip(
+    "scipy.linalg backend feature has not been released"
+)
+
+
+@testing.parameterize(
+    *testing.product(
+        {
+            "shape": [
+                (1, 1),
+                (2, 2),
+                (3, 3),
+                (5, 5),
+                (1, 5),
+                (5, 1),
+                (2, 5),
+                (5, 2),
+            ],
+        }
+    )
+)
+@testing.fix_random()
+@testing.with_requires("scipy")
+class TestLUFactor(unittest.TestCase):
+
+    @testing.for_dtypes("fdFD")
+    def test_lu_factor(self, dtype):
+        if self.shape[0] != self.shape[1]:
+            self.skipTest(
+                "skip non-square tests since scipy.lu_factor requires square"
+            )
+        a_cpu = testing.shaped_random(self.shape, numpy, dtype=dtype)
+        a_gpu = cupy.asarray(a_cpu)
+        result_cpu = scipy.linalg.lu_factor(a_cpu)
+        # Originally used cupyx.scipy.linalg.lu_factor
+        result_gpu = cupy.linalg.lu_factor(a_gpu)
+        assert len(result_cpu) == len(result_gpu)
+        assert result_cpu[0].dtype == result_gpu[0].dtype
+        # DPNP returns pivot indices as int64, while SciPy returns int32.
+        # Check for the expected dtypes explicitly.
+        # assert result_cpu[1].dtype == result_gpu[1].dtype
+        assert result_cpu[1].dtype == cupy.int32
+        assert result_gpu[1].dtype == cupy.int64
+        testing.assert_allclose(result_cpu[0], result_gpu[0], atol=1e-5)
+        testing.assert_array_equal(result_cpu[1], result_gpu[1])
+
+    def check_lu_factor_reconstruction(self, A):
+        m, n = self.shape
+        lu, piv = cupy.linalg.lu_factor(A)
+        # extract ``L`` and ``U`` from ``lu``
+        L = cupy.tril(lu, k=-1)
+        cupy.fill_diagonal(L, 1.0)
+        L = L[:, :m]
+        U = cupy.triu(lu)
+        U = U[:n, :]
+        # check output shapes
+        assert lu.shape == (m, n)
+        assert L.shape == (m, min(m, n))
+        assert U.shape == (min(m, n), n)
+        assert piv.shape == (min(m, n),)
+        # apply pivot (on CPU since slaswp is not available in cupy)
+        piv = cupy.asnumpy(piv)
+        rows = list(range(m))
+        for i, row in enumerate(piv):
+            if i != row:
+                rows[i], rows[row] = rows[row], rows[i]
+        rows = cupy.asarray(rows)
+        PA = A[rows]
+        # check that reconstruction is close to original
+        LU = L.dot(U)
+        testing.assert_allclose(LU, PA, atol=1e-5)
+
+    @testing.for_dtypes("fdFD")
+    def test_lu_factor_reconstruction(self, dtype):
+        A = testing.shaped_random(self.shape, cupy, dtype=dtype)
+        self.check_lu_factor_reconstruction(A)
+
+    @testing.for_dtypes("fdFD")
+    def test_lu_factor_reconstruction_singular(self, dtype):
+        if self.shape[0] != self.shape[1]:
+            self.skipTest(
+                "skip non-square tests since scipy.lu_factor requires square"
+            )
+        A = testing.shaped_random(self.shape, cupy, dtype=dtype)
+        A -= A.mean(axis=0, keepdims=True)
+        A -= A.mean(axis=1, keepdims=True)
+        with warnings.catch_warnings():
+            warnings.simplefilter("ignore", RuntimeWarning)
+            self.check_lu_factor_reconstruction(A)
+
+
+@testing.parameterize(
+    *testing.product(
+        {
+            "shape": [
+                (1, 1),
+                (2, 2),
+                (3, 3),
+                (5, 5),
+                (1, 5),
+                (5, 1),
+                (2, 5),
+                (5, 2),
+            ],
+            "permute_l": [False, True],
+        }
+    )
+)
+@testing.fix_random()
+@testing.with_requires("scipy")
+class TestLU(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls):
+        pytest.skip("lu() is not supported yet")
+
+    @testing.for_dtypes("fdFD")
+    def test_lu(self, dtype):
+        a_cpu = testing.shaped_random(self.shape, numpy, dtype=dtype)
+        a_gpu = cupy.asarray(a_cpu)
+        result_cpu = scipy.linalg.lu(a_cpu, permute_l=self.permute_l)
+        result_gpu = cupy.linalg.lu(a_gpu, permute_l=self.permute_l)
+        assert len(result_cpu) == len(result_gpu)
+        if not self.permute_l:
+            # check permutation matrix
+            result_cpu = list(result_cpu)
+            result_gpu = list(result_gpu)
+            P_cpu = result_cpu.pop(0)
+            P_gpu = result_gpu.pop(0)
+            cupy.testing.assert_array_equal(P_gpu, P_cpu)
+        cupy.testing.assert_allclose(result_gpu[0], result_cpu[0], atol=1e-5)
+        cupy.testing.assert_allclose(result_gpu[1], result_cpu[1], atol=1e-5)
+
+    @testing.for_dtypes("fdFD")
+    def test_lu_reconstruction(self, dtype):
+        m, n = self.shape
+        A = testing.shaped_random(self.shape, cupy, dtype=dtype)
+        if self.permute_l:
+            PL, U = cupy.linalg.lu(A, permute_l=self.permute_l)
+            PLU = PL @ U
+        else:
+            P, L, U = cupy.linalg.lu(A, permute_l=self.permute_l)
+            PLU = P @ L @ U
+        # check that reconstruction is close to original
+        cupy.testing.assert_allclose(PLU, A, atol=1e-5)
+
+
+@testing.parameterize(
+    *testing.product(
+        {
+            "trans": [0, 1, 2],
+            "shapes": [((4, 4), (4,)), ((5, 5), (5, 2))],
+        }
+    )
+)
+@testing.fix_random()
+@testing.with_requires("scipy")
+class TestLUSolve(unittest.TestCase):
+
+    @testing.for_dtypes("fdFD")
+    @testing.numpy_cupy_allclose(atol=1e-5, scipy_name="scp")
+    def test_lu_solve(self, xp, scp, dtype):
+        a_shape, b_shape = self.shapes
+        A = testing.shaped_random(a_shape, xp, dtype=dtype)
+        b = testing.shaped_random(b_shape, xp, dtype=dtype)
+        lu = scp.linalg.lu_factor(A)
+        return scp.linalg.lu_solve(lu, b, trans=self.trans)
+
+    @requires_scipy_linalg_backend
+    @testing.for_dtypes("fdFD")
+    @testing.numpy_cupy_allclose(atol=1e-5)
+    def test_lu_solve_backend(self, xp, dtype):
+        a_shape, b_shape = self.shapes
+        A = testing.shaped_random(a_shape, xp, dtype=dtype)
+        b = testing.shaped_random(b_shape, xp, dtype=dtype)
+        if xp is numpy:
+            lu = scipy.linalg.lu_factor(A)
+            backend = "scipy"
+        else:
+            lu = cupy.linalg.lu_factor(A)
+            backend = cupy.linalg
+        with scipy.linalg.set_backend(backend):
+            out = scipy.linalg.lu_solve(lu, b, trans=self.trans)
+        return out
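
For reference, the identity that check_lu_factor_reconstruction exercises can be shown with plain NumPy/SciPy, independent of dpnp. A minimal sketch, assuming SciPy is installed; the pivot loop mirrors the one in the test above:

import numpy as np
import scipy.linalg

rng = np.random.default_rng(0)
A = rng.standard_normal((5, 5))

# lu_factor packs L (unit lower triangular) and U into one matrix,
# plus pivot indices describing the row interchanges
lu, piv = scipy.linalg.lu_factor(A)
L = np.tril(lu, k=-1)
np.fill_diagonal(L, 1.0)
U = np.triu(lu)

# piv[i] is the row swapped with row i; applying the swaps gives P @ A
rows = list(range(5))
for i, row in enumerate(piv):
    rows[i], rows[row] = rows[row], rows[i]

np.testing.assert_allclose(L @ U, A[rows], atol=1e-10)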

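The trans parameter in the TestLUSolve cases selects which system is solved from a single factorization: 0 solves A x = b, 1 solves A^T x = b, and 2 solves A^H x = b. A minimal SciPy-only sketch of that reuse, assuming SciPy is installed:

import numpy as np
from scipy.linalg import lu_factor, lu_solve

rng = np.random.default_rng(1)
A = rng.standard_normal((4, 4))
b = rng.standard_normal(4)

lu_piv = lu_factor(A)              # factor once
x0 = lu_solve(lu_piv, b)           # solve A @ x = b (trans=0)
x1 = lu_solve(lu_piv, b, trans=1)  # solve A.T @ x = b with the same factors

np.testing.assert_allclose(A @ x0, b, atol=1e-10)
np.testing.assert_allclose(A.T @ x1, b, atol=1e-10)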