Commit 7a5ea19

test: added tests when using mask in DistributedArray

1 parent 5144312, commit 7a5ea19
File tree

3 files changed: 120 additions & 15 deletions

examples/plot_distributed_array.py

Lines changed: 58 additions & 6 deletions

@@ -128,24 +128,76 @@

 # Create mask
 nsub = 2
-mask = np.repeat(np.arange(size // nsub), nsub)
-if rank == 0: print(f"Mask: {mask}")
+subsize = max(1, size // nsub)
+mask = np.repeat(np.arange(size // subsize), subsize)
+if rank == 0:
+    print("1D masked arrays")
+    print(f"Mask: {mask}")

 # Create and fill the distributed array
 x = pylops_mpi.DistributedArray(global_shape=global_shape,
                                 partition=Partition.SCATTER,
                                 mask=mask)
-x[:] = (MPI.COMM_WORLD.Get_rank() + 1) * np.ones(local_shape)
+x[:] = (MPI.COMM_WORLD.Get_rank() % subsize + 1.) * np.ones(local_shape)
 xloc = x.asarray()

 # Dot product
 dot = x.dot(x)
-dotloc = np.dot(xloc[local_shape * nsub * (rank // nsub):local_shape * nsub * (rank // nsub + 1)],
-                xloc[local_shape * nsub * (rank // nsub):local_shape * nsub * (rank // nsub + 1)])
+dotloc = np.dot(xloc[local_shape * subsize * (rank // subsize):local_shape * subsize * (rank // subsize + 1)],
+                xloc[local_shape * subsize * (rank // subsize):local_shape * subsize * (rank // subsize + 1)])
 print(f"Dot check (Rank {rank}): {np.allclose(dot, dotloc)}")

 # Norm
 norm = x.norm(ord=2)
-normloc = np.linalg.norm(xloc[local_shape * nsub * (rank // nsub):local_shape * nsub * (rank // nsub + 1)],
+normloc = np.linalg.norm(xloc[local_shape * subsize * (rank // subsize):local_shape * subsize * (rank // subsize + 1)],
                          ord=2)
 print(f"Norm check (Rank {rank}): {np.allclose(norm, normloc)}")
+
+###############################################################################
+# And with 2d-arrays distributed over axis=1
+extra_dim_shape = 2
+if rank == 0:
+    print("2D masked arrays (over axis=1)")
+
+# Create and fill the distributed array
+x = pylops_mpi.DistributedArray(global_shape=(extra_dim_shape, global_shape),
+                                partition=Partition.SCATTER,
+                                axis=1, mask=mask)
+x[:] = (MPI.COMM_WORLD.Get_rank() % subsize + 1.) * np.ones((extra_dim_shape, local_shape))
+xloc = x.asarray()
+
+# Dot product
+dot = x.dot(x)
+dotloc = np.dot(xloc[:, local_shape * subsize * (rank // subsize):local_shape * subsize * (rank // subsize + 1)].ravel(),
+                xloc[:, local_shape * subsize * (rank // subsize):local_shape * subsize * (rank // subsize + 1)].ravel())
+print(f"Dot check (Rank {rank}): {np.allclose(dot, dotloc)}")
+
+# Norm
+norm = x.norm(ord=2, axis=1)
+normloc = np.linalg.norm(xloc[:, local_shape * subsize * (rank // subsize):local_shape * subsize * (rank // subsize + 1)],
+                         ord=2, axis=1)
+print(f"Norm check (Rank {rank}): {np.allclose(norm, normloc)}")
+
+###############################################################################
+# And finally with 2d-arrays distributed over axis=0
+if rank == 0:
+    print("2D masked arrays (over axis=0)")
+
+# Create and fill the distributed array
+x = pylops_mpi.DistributedArray(global_shape=(global_shape, extra_dim_shape),
+                                partition=Partition.SCATTER,
+                                axis=0, mask=mask)
+x[:] = (MPI.COMM_WORLD.Get_rank() % subsize + 1.) * np.ones((local_shape, extra_dim_shape))
+xloc = x.asarray()
+
+# Dot product
+dot = x.dot(x)
+dotloc = np.dot(xloc[local_shape * subsize * (rank // subsize):local_shape * subsize * (rank // subsize + 1)].ravel(),
+                xloc[local_shape * subsize * (rank // subsize):local_shape * subsize * (rank // subsize + 1)].ravel())
+print(f"Dot check (Rank {rank}): {np.allclose(dot, dotloc)}")
+
+# Norm
+norm = x.norm(ord=2, axis=0)
+normloc = np.linalg.norm(xloc[local_shape * subsize * (rank // subsize):local_shape * subsize * (rank // subsize + 1)],
+                         ord=2, axis=0)
+print(f"Norm check (Rank {rank}): {np.allclose(norm, normloc)}")

pylops_mpi/DistributedArray.py

Lines changed: 1 addition & 1 deletion

@@ -319,7 +319,7 @@ def to_dist(cls, x: NDArray,
             operations on the distributed array such as dot product or norm.

         Returns
-        ----------
+        -------
         dist_array : :obj:`DistributedArray`
             Distributed Array of the Global Array
         """

tests/test_distributedarray.py

Lines changed: 61 additions & 8 deletions

@@ -45,28 +45,28 @@
 par5j = {'x': np.random.normal(300, 300, (500, 501)) + 1.0j * np.random.normal(50, 50, (500, 501)),
         'partition': Partition.SCATTER, 'axis': 1}

-par6 = {'x': np.random.normal(100, 100, (500, 500)),
+par6 = {'x': np.random.normal(100, 100, (600, 600)),
        'partition': Partition.SCATTER, 'axis': 0}

-par6b = {'x': np.random.normal(100, 100, (500, 500)),
+par6b = {'x': np.random.normal(100, 100, (600, 600)),
         'partition': Partition.BROADCAST, 'axis': 0}

-par7 = {'x': np.random.normal(300, 300, (500, 500)),
+par7 = {'x': np.random.normal(300, 300, (600, 600)),
        'partition': Partition.SCATTER, 'axis': 0}

-par7b = {'x': np.random.normal(300, 300, (500, 500)),
+par7b = {'x': np.random.normal(300, 300, (600, 600)),
         'partition': Partition.BROADCAST, 'axis': 0}

-par8 = {'x': np.random.normal(100, 100, (1000,)),
+par8 = {'x': np.random.normal(100, 100, (1200,)),
        'partition': Partition.SCATTER, 'axis': 0}

-par8b = {'x': np.random.normal(100, 100, (1000,)),
+par8b = {'x': np.random.normal(100, 100, (1200,)),
         'partition': Partition.BROADCAST, 'axis': 0}

-par9 = {'x': np.random.normal(300, 300, (1000,)),
+par9 = {'x': np.random.normal(300, 300, (1200,)),
        'partition': Partition.SCATTER, 'axis': 0}

-par9b = {'x': np.random.normal(300, 300, (1000,)),
+par9b = {'x': np.random.normal(300, 300, (1200,)),
         'partition': Partition.BROADCAST, 'axis': 0}


@@ -192,3 +192,56 @@ def test_distributed_norm(par):
     assert_allclose(arr.norm(ord=np.inf, axis=par['axis']),
                     np.linalg.norm(par['x'], ord=np.inf, axis=par['axis']), rtol=1e-14)
     assert_allclose(arr.norm(), np.linalg.norm(par['x'].flatten()), rtol=1e-13)
+
+
+@pytest.mark.mpi(min_size=2)
+@pytest.mark.parametrize("par1, par2", [(par6, par7), (par6b, par7b),
+                                        (par8, par9), (par8b, par9b)])
+def test_distributed_maskeddot(par1, par2):
+    """Test Distributed Dot product with masked array"""
+    nsub = 3  # number of subcommunicators
+    subsize = max(1, MPI.COMM_WORLD.Get_size() // nsub)
+    mask = np.repeat(np.arange(nsub), subsize)
+    # Replicate x1 and x2 as required in masked arrays
+    x1, x2 = par1['x'], par2['x']
+    if par1['axis'] != 0:
+        x1 = np.swapaxes(x1, par1['axis'], 0)
+    for isub in range(1, nsub):
+        x1[(x1.shape[0] // nsub) * isub:(x1.shape[0] // nsub) * (isub + 1)] = x1[:x1.shape[0] // nsub]
+    if par1['axis'] != 0:
+        x1 = np.swapaxes(x1, 0, par1['axis'])
+    if par2['axis'] != 0:
+        x2 = np.swapaxes(x2, par2['axis'], 0)
+    for isub in range(1, nsub):
+        x2[(x2.shape[0] // nsub) * isub:(x2.shape[0] // nsub) * (isub + 1)] = x2[:x2.shape[0] // nsub]
+    if par2['axis'] != 0:
+        x2 = np.swapaxes(x2, 0, par2['axis'])
+
+    arr1 = DistributedArray.to_dist(x=x1, partition=par1['partition'], mask=mask, axis=par1['axis'])
+    arr2 = DistributedArray.to_dist(x=x2, partition=par2['partition'], mask=mask, axis=par2['axis'])
+    assert_allclose(arr1.dot(arr2), np.dot(x1.flatten(), x2.flatten()) / nsub, rtol=1e-14)
+
+
+@pytest.mark.mpi(min_size=2)
+@pytest.mark.parametrize("par", [(par6), (par6b), (par7), (par7b),
+                                 (par8), (par8b), (par9), (par9b)])
+def test_distributed_maskednorm(par):
+    """Test Distributed numpy.linalg.norm method with masked array"""
+    nsub = 3  # number of subcommunicators
+    subsize = max(1, MPI.COMM_WORLD.Get_size() // nsub)
+    mask = np.repeat(np.arange(nsub), subsize)
+    # Replicate x as required in masked arrays
+    x = par['x']
+    if par['axis'] != 0:
+        x = np.swapaxes(x, par['axis'], 0)
+    for isub in range(1, nsub):
+        x[(x.shape[0] // nsub) * isub:(x.shape[0] // nsub) * (isub + 1)] = x[:x.shape[0] // nsub]
+    if par['axis'] != 0:
+        x = np.swapaxes(x, 0, par['axis'])
+    arr = DistributedArray.to_dist(x=x, mask=mask, axis=par['axis'])
+    assert_allclose(arr.norm(ord=1, axis=par['axis']),
+                    np.linalg.norm(par['x'], ord=1, axis=par['axis']) / nsub, rtol=1e-14)
+    assert_allclose(arr.norm(ord=np.inf, axis=par['axis']),
+                    np.linalg.norm(par['x'], ord=np.inf, axis=par['axis']), rtol=1e-14)
+    assert_allclose(arr.norm(ord=2, axis=par['axis']),
+                    np.linalg.norm(par['x'], ord=2, axis=par['axis']) / np.sqrt(nsub), rtol=1e-13)
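
A note on the scaling factors in the masked assertions above: the tests replicate the data so that x contains nsub identical copies of each subcommunicator's block, while the masked DistributedArray reduces over a single copy only. The reference dot product and the ord=1 norm are therefore divided by nsub, the ord=2 norm by sqrt(nsub), and the ord=inf norm is left unchanged. A minimal serial check of that arithmetic (y stands for one block, x for its nsub replicas; names and sizes are illustrative only):

import numpy as np

nsub = 3
rng = np.random.default_rng(0)
y = rng.normal(0., 1., 200)       # data owned by one subcommunicator
x = np.tile(y, nsub)              # nsub replicas, as the tests build them

assert np.isclose(np.dot(x, x) / nsub, np.dot(y, y))
assert np.isclose(np.linalg.norm(x, ord=1) / nsub, np.linalg.norm(y, ord=1))
assert np.isclose(np.linalg.norm(x, ord=2) / np.sqrt(nsub), np.linalg.norm(y, ord=2))
assert np.isclose(np.linalg.norm(x, ord=np.inf), np.linalg.norm(y, ord=np.inf))

Since these tests carry the pytest.mark.mpi marker, they are meant to be launched under MPI, for example with something like mpiexec -n 6 pytest --with-mpi tests/test_distributedarray.py when the pytest-mpi plugin is installed.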
