par5j = {'x': np.random.normal(300, 300, (500, 501)) + 1.0j * np.random.normal(50, 50, (500, 501)),
         'partition': Partition.SCATTER, 'axis': 1}

-par6 = {'x': np.random.normal(100, 100, (500, 500)),
+par6 = {'x': np.random.normal(100, 100, (600, 600)),
        'partition': Partition.SCATTER, 'axis': 0}

-par6b = {'x': np.random.normal(100, 100, (500, 500)),
+par6b = {'x': np.random.normal(100, 100, (600, 600)),
         'partition': Partition.BROADCAST, 'axis': 0}

-par7 = {'x': np.random.normal(300, 300, (500, 500)),
+par7 = {'x': np.random.normal(300, 300, (600, 600)),
        'partition': Partition.SCATTER, 'axis': 0}

-par7b = {'x': np.random.normal(300, 300, (500, 500)),
+par7b = {'x': np.random.normal(300, 300, (600, 600)),
         'partition': Partition.BROADCAST, 'axis': 0}

-par8 = {'x': np.random.normal(100, 100, (1000,)),
+par8 = {'x': np.random.normal(100, 100, (1200,)),
        'partition': Partition.SCATTER, 'axis': 0}

-par8b = {'x': np.random.normal(100, 100, (1000,)),
+par8b = {'x': np.random.normal(100, 100, (1200,)),
         'partition': Partition.BROADCAST, 'axis': 0}

-par9 = {'x': np.random.normal(300, 300, (1000,)),
+par9 = {'x': np.random.normal(300, 300, (1200,)),
        'partition': Partition.SCATTER, 'axis': 0}

-par9b = {'x': np.random.normal(300, 300, (1000,)),
+par9b = {'x': np.random.normal(300, 300, (1200,)),
         'partition': Partition.BROADCAST, 'axis': 0}

@@ -192,3 +192,56 @@ def test_distributed_norm(par):
    assert_allclose(arr.norm(ord=np.inf, axis=par['axis']),
                    np.linalg.norm(par['x'], ord=np.inf, axis=par['axis']), rtol=1e-14)
    assert_allclose(arr.norm(), np.linalg.norm(par['x'].flatten()), rtol=1e-13)
+
+
+@pytest.mark.mpi(min_size=2)
+@pytest.mark.parametrize("par1, par2", [(par6, par7), (par6b, par7b),
+                                        (par8, par9), (par8b, par9b)])
+def test_distributed_maskeddot(par1, par2):
+    """Test Distributed Dot product with masked array"""
+    nsub = 3  # number of subcommunicators
+    subsize = max(1, MPI.COMM_WORLD.Get_size() // nsub)
+    mask = np.repeat(np.arange(nsub), subsize)
+    # Replicate x1 and x2 as required in masked arrays
+    x1, x2 = par1['x'], par2['x']
+    if par1['axis'] != 0:
+        x1 = np.swapaxes(x1, par1['axis'], 0)
+    for isub in range(1, nsub):
+        x1[(x1.shape[0] // nsub) * isub:(x1.shape[0] // nsub) * (isub + 1)] = x1[:x1.shape[0] // nsub]
+    if par1['axis'] != 0:
+        x1 = np.swapaxes(x1, 0, par1['axis'])
+    if par2['axis'] != 0:
+        x2 = np.swapaxes(x2, par2['axis'], 0)
+    for isub in range(1, nsub):
+        x2[(x2.shape[0] // nsub) * isub:(x2.shape[0] // nsub) * (isub + 1)] = x2[:x2.shape[0] // nsub]
+    if par2['axis'] != 0:
+        x2 = np.swapaxes(x2, 0, par2['axis'])
+
+    arr1 = DistributedArray.to_dist(x=x1, partition=par1['partition'], mask=mask, axis=par1['axis'])
+    arr2 = DistributedArray.to_dist(x=x2, partition=par2['partition'], mask=mask, axis=par2['axis'])
+    assert_allclose(arr1.dot(arr2), np.dot(x1.flatten(), x2.flatten()) / nsub, rtol=1e-14)
+
+
+@pytest.mark.mpi(min_size=2)
+@pytest.mark.parametrize("par", [(par6), (par6b), (par7), (par7b),
+                                 (par8), (par8b), (par9), (par9b)])
+def test_distributed_maskednorm(par):
+    """Test Distributed numpy.linalg.norm method with masked array"""
+    nsub = 3  # number of subcommunicators
+    subsize = max(1, MPI.COMM_WORLD.Get_size() // nsub)
+    mask = np.repeat(np.arange(nsub), subsize)
+    # Replicate x as required in masked arrays
+    x = par['x']
+    if par['axis'] != 0:
+        x = np.swapaxes(x, par['axis'], 0)
+    for isub in range(1, nsub):
+        x[(x.shape[0] // nsub) * isub:(x.shape[0] // nsub) * (isub + 1)] = x[:x.shape[0] // nsub]
+    if par['axis'] != 0:
+        x = np.swapaxes(x, 0, par['axis'])
+    arr = DistributedArray.to_dist(x=x, mask=mask, axis=par['axis'])
+    assert_allclose(arr.norm(ord=1, axis=par['axis']),
+                    np.linalg.norm(par['x'], ord=1, axis=par['axis']) / nsub, rtol=1e-14)
+    assert_allclose(arr.norm(ord=np.inf, axis=par['axis']),
+                    np.linalg.norm(par['x'], ord=np.inf, axis=par['axis']), rtol=1e-14)
+    assert_allclose(arr.norm(ord=2, axis=par['axis']),
+                    np.linalg.norm(par['x'], ord=2, axis=par['axis']) / np.sqrt(nsub), rtol=1e-13)
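Note on the expected values in the masked tests above: the input is replicated so that each of the nsub subcommunicators owns an identical block, so a reduction over the replicated global array overcounts a single replica by a factor of nsub for sum-like quantities (dot, 1-norm), by sqrt(nsub) for the 2-norm, and not at all for the inf-norm; this is where the / nsub and / np.sqrt(nsub) factors in the assertions come from. A minimal NumPy-only sketch of that scaling (the block shape and seed below are illustrative, not taken from the test file):

import numpy as np

nsub = 3
rng = np.random.default_rng(0)
block = rng.normal(100, 100, (200, 600))      # one replica
x = np.concatenate([block] * nsub, axis=0)    # nsub stacked replicas, shape (600, 600)

# sum-like reductions overcount by nsub ...
assert np.isclose(np.dot(x.ravel(), x.ravel()) / nsub, np.dot(block.ravel(), block.ravel()))
assert np.isclose(np.linalg.norm(x.ravel(), ord=1) / nsub, np.linalg.norm(block.ravel(), ord=1))
# ... the 2-norm by sqrt(nsub), and the inf-norm not at all
assert np.isclose(np.linalg.norm(x.ravel(), ord=2) / np.sqrt(nsub), np.linalg.norm(block.ravel(), ord=2))
assert np.isclose(np.linalg.norm(x.ravel(), ord=np.inf), np.linalg.norm(block.ravel(), ord=np.inf))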