Skip to content

Commit 57d3890

Browse files
committed
add MPI landlab example
1 parent fba7330 commit 57d3890

File tree

2 files changed

+267
-0
lines changed

2 files changed

+267
-0
lines changed
Lines changed: 262 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,262 @@
1+
"""
2+
This is an experiment to use pymetis and mpi4py for landlab parallel
3+
4+
Test to run on
5+
SimpleSubmarineDiffuser
6+
https://landlab.csdms.io/tutorials/marine_sediment_transport/simple_submarine_diffuser_tutorial.html
7+
8+
ver3: identify boundary nodes for each subdomain; run the model for multiple time steps
9+
10+
To run the program:
11+
mpiexec -np 5 python mpi_landlab3.py
12+
13+
"""
14+
import os
15+
import numpy as np
16+
import pymetis
17+
18+
# import matplotlib
19+
# matplotlib.use('MacOSX')
20+
import matplotlib.pyplot as plt
21+
22+
from landlab import HexModelGrid, VoronoiDelaunayGrid
23+
from landlab.components import SimpleSubmarineDiffuser
24+
25+
26+
## step 0: set up parallel
27+
from mpi4py import MPI
28+
from collections import defaultdict
29+
30+
comm = MPI.COMM_WORLD
rank = comm.Get_rank()   # this process's id (0..size-1)
size = comm.Get_size()   # total number of MPI processes

# One METIS partition per MPI process.
num_partitions = size
# NOTE(review): this assertion can never fail because num_partitions is
# derived from size on the previous line; the stated intent (process count
# must match the partition count) is not actually being checked.
assert size == num_partitions, "Number of MPI processes must match the number of partitions!"

if rank == 0:
    # Rank 0 builds the global grid, partitions it with METIS, and
    # broadcasts the results; all plots are written to ./output.
    output_dir = os.path.join(os.getcwd(), 'output')
    os.makedirs(output_dir, exist_ok=True)

    ## step 1: define hex model grid and assign z values
    # 17x17 hex grid, rectangular node layout, unit spacing.
    mg = HexModelGrid((17, 17), spacing=1, node_layout='rect')
    z = mg.add_zeros("topographic__elevation", at="node")
    cum_depo = mg.add_zeros("total_deposit__thickness", at="node")

    # Cone-shaped island centred at (8, 8): elevation decreases with radial
    # distance from the midpoint and is clipped at -3.0 below.
    midpoint = 8
    dx = np.abs(mg.x_of_node - midpoint)
    dy = np.abs(mg.y_of_node - midpoint)
    ds = np.sqrt(dx * dx + dy * dy)   # radial distance from the centre
    z[:] = (midpoint - ds) - 3.0
    z[z < -3.0] = -3.0
    z0 = z.copy()   # keep the initial elevation for final difference plots

    # identify boundary nodes (perimeter of the global grid)
    boundary_nodes = mg.boundary_nodes

    # plot z in 2D (map view) and 1D (slice of all nodes vs x)
    mg.imshow(z, cmap="coolwarm", vmin=-3)
    plt.title("Elevation on Global Grid")
    plt.savefig(os.path.join(output_dir, "dem_hex.png"))

    plt.clf()
    plt.plot(mg.x_of_node, z, ".")
    plt.plot([0, 17], [0, 0], "b:")   # zero-elevation reference line
    plt.grid(True)
    plt.xlabel("Distance (m)")
    plt.ylabel("Elevation (m)")
    plt.savefig(os.path.join(output_dir, "dem_hex_slice.png"))

    # plot total_deposit__thickness (all zeros at initiation)
    plt.clf()
    mg.imshow("total_deposit__thickness", cmap="coolwarm", vmin=-1, vmax=1)
    plt.title("Total deposit thickness initiation (m)")
    plt.savefig(os.path.join(output_dir, "total_deposit_init.png"))

    ## step2: grid partition
    # Build the node-adjacency list in the form pymetis expects: one array
    # of neighbour ids per node; -1 entries mean "no neighbour" and are
    # dropped.
    adjacency_list = []

    for node_id in mg.nodes.flat:
        adjacent_nodes = [n for n in mg.adjacent_nodes_at_node[node_id] if n != -1]
        adjacency_list.append(np.array(adjacent_nodes))

    # Partition the grid using pymetis; part_labels[i] is the partition
    # (i.e. the MPI rank) that owns global node i.
    n_cuts, part_labels = pymetis.part_graph(num_partitions, adjacency=adjacency_list)

    # Convert partition labels to a NumPy array for plotting/printing
    partition_array = np.array(part_labels)
    print(partition_array)

    # visualization of the partitioning; each node annotated "id/parN"
    fig, ax = plt.subplots(figsize=[16, 14])
    ax.scatter(mg.node_x, mg.node_y, c=partition_array, cmap='viridis')
    ax.set_title('grid partition based on nodes')
    for node_id in mg.nodes.flat:
        ax.annotate(f"{node_id}/par{partition_array[node_id]}",
                    (mg.node_x[node_id], mg.node_y[node_id]),
                    color='black', fontsize=8, ha='center', va='top')
    fig.savefig(os.path.join(output_dir, 'global_grid_partition.png'))

    print(f"grid partition at rank {rank}")
else:
    # Non-root ranks receive all of these via the broadcasts below.
    part_labels = None
    mg = None
    adjacency_list = None
    boundary_nodes = None
    output_dir = None

# Broadcast results to the other ranks. bcast is collective, so non-root
# ranks block here until rank 0 finishes the setup above — this also
# guarantees the output directory exists before other ranks save plots.
part_labels = comm.bcast(part_labels, root=0)
adjacency_list = comm.bcast(adjacency_list, root=0)
boundary_nodes = comm.bcast(boundary_nodes, root=0)
output_dir = comm.bcast(output_dir)   # root defaults to 0
119+
120+
## step3: identify ghost nodes
# Nodes this rank owns, i.e. those METIS assigned to partition == rank.
local_nodes = [n for n, owner in enumerate(part_labels) if owner == rank]

# For each neighbouring partition: which of our owned nodes it needs
# (send_to) and which of its nodes we need as ghosts (recv_from).
send_to = defaultdict(set)
recv_from = defaultdict(set)

for owned in local_nodes:
    for nbr in adjacency_list[owned]:
        owner = part_labels[nbr]
        if owner == rank:
            continue  # edge stays inside our partition; nothing to exchange
        print(owner, owned)
        send_to[owner].add(owned)
        recv_from[owner].add(nbr)
136+
137+
# loop for multiple time steps
for time_step in range(0,20):
    # Re-broadcast the whole global grid so every rank starts this step
    # from the state rank 0 assembled at the end of the previous step.
    # NOTE(review): broadcasting the full grid each step is simple but
    # heavyweight; only the ghost-node exchange below is strictly local.
    mg = comm.bcast(mg, root=0)

    ## step4: send and receive data for ghost nodes
    # Send elevations of our partition-boundary nodes to each neighbouring
    # partition; the tag encodes the sending rank.
    for pid, nodes_to_send in send_to.items():
        # Convert to sorted list so sender and receiver agree on ordering
        nodes_to_send = sorted(nodes_to_send)
        elev_to_send = mg.at_node["topographic__elevation"][list(nodes_to_send)]
        comm.send((nodes_to_send, elev_to_send), dest=pid, tag=rank)

    # Receive ghost-node elevations from each neighbouring partition and
    # collect them into {global node id: elevation}.
    ghost_nodes_values= {}
    for pid, ghost_nodes in recv_from.items():
        nodes, elev_values = comm.recv(source=pid, tag=pid)
        ghost_nodes_values.update(dict(zip(nodes, elev_values)))

    ## step5: define local grid for simulation
    # Local subdomain = owned nodes + ghost nodes; a Voronoi grid is built
    # from the global x/y coordinates of those nodes.
    ghost_nodes = list(ghost_nodes_values.keys())

    vmg_global_ind = sorted(local_nodes + ghost_nodes)   # global ids, sorted
    x = mg.node_x[vmg_global_ind]
    y = mg.node_y[vmg_global_ind]
    elev_local = mg.at_node["topographic__elevation"][vmg_global_ind].copy()
    cum_depo_local = mg.at_node["total_deposit__thickness"][vmg_global_ind].copy()

    local_vmg = VoronoiDelaunayGrid(x, y)
    # NOTE(review): this assumes add_field keeps a reference to elev_local /
    # cum_depo_local (not a copy), so those arrays reflect the diffuser's
    # updates and can be gathered in step 7 below — confirm against the
    # landlab add_field copy semantics.
    local_vmg.add_field("topographic__elevation", elev_local, at="node")
    local_cum_depo = local_vmg.add_field("total_deposit__thickness", cum_depo_local, at="node")
    local_z0 = elev_local.copy()   # pre-step elevation for the change plot

    # Fixed-value boundaries on the subgrid: all ghost nodes plus any owned
    # nodes that lie on the global grid perimeter.
    local_nodes_as_boundary = [node for node in local_nodes if node in boundary_nodes]
    local_boundary_nodes_ind = [vmg_global_ind.index(val) for val in ghost_nodes + local_nodes_as_boundary]
    local_vmg.status_at_node[local_boundary_nodes_ind] = local_vmg.BC_NODE_IS_FIXED_VALUE

    # plot subgrid for each rank
    fig, ax = plt.subplots(figsize=[18, 14])
    sc = ax.scatter(local_vmg.node_x, local_vmg.node_y,
                    c=local_vmg.at_node["topographic__elevation"], cmap="coolwarm", vmin=-3)
    ax.set_title(f'subgrid nodes rank={rank}')
    cbar = fig.colorbar(sc, ax=ax)
    cbar.set_label('Elevation (m)')
    fig.savefig(os.path.join(output_dir,f'subgrid_for_rank{rank}.png'))

    ## step 6: run simulation
    # define the diffusion model on the local subgrid
    ssd = SimpleSubmarineDiffuser(
        local_vmg, sea_level=0.0, wave_base=1.0, shallow_water_diffusivity=1.0
    )

    # run one step of duration 0.2
    ssd.run_one_step(0.2)
    # accumulate this step's deposit into the running total
    local_cum_depo += local_vmg.at_node["sediment_deposit__thickness"]

    # plot elevation change (in cm); boundary nodes are marked with "B"
    elev_diff = (local_z0 - elev_local) * 100
    fig, ax = plt.subplots(figsize=[16, 14])
    sc = ax.scatter(local_vmg.node_x, local_vmg.node_y,c=elev_diff, cmap="coolwarm", vmin=-6,vmax=6)
    ax.set_title(f'Changes of elevation rank={rank}')
    for node_id in local_boundary_nodes_ind:
        ax.annotate(f"B",
                    (local_vmg.node_x[node_id], local_vmg.node_y[node_id]),
                    color='blue', fontsize=12, ha='center', va='top')
    cbar = fig.colorbar(sc, ax=ax)
    cbar.set_label('Elevation Change (cm)')
    fig.savefig(os.path.join(output_dir,f'elev_diff_for_rank{rank}.png'))

    # close all plots
    plt.close('all')

    ## step 7 gather all updates to rank 0
    # Create local update data (only for owned nodes, not ghost): tuples of
    # (global node id, elevation, cumulative deposit).
    local_updates = []
    for node in local_nodes:
        vmg_local_ind = vmg_global_ind.index(node)
        local_updates.append( (node,
                               elev_local[vmg_local_ind],
                               local_cum_depo[vmg_local_ind]) )

    all_updates = comm.gather(local_updates, root=0)

    if rank == 0:
        # Flatten list of updates from all ranks and write them back into
        # the global grid fields.
        flat_updates = [item for sublist in all_updates for item in sublist]
        for node_id, elev, cum_depo in flat_updates:
            mg.at_node["topographic__elevation"][node_id] = elev
            mg.at_node["total_deposit__thickness"][node_id] = cum_depo

        # plot results
        # diff_z and total_deposit_result should be the same
        plt.clf()
        mg.imshow("total_deposit__thickness", cmap="coolwarm")
        plt.title("Total deposit thickness results (m)")
        plt.savefig(os.path.join(output_dir,"total_deposit_result.png"))

        plt.clf()
        diff_z = mg.at_node["topographic__elevation"] - z0
        mg.imshow(diff_z, cmap="coolwarm")
        plt.title("Changes of elevation (m)")
        plt.savefig(os.path.join(output_dir,"dem_diff_hex_model_result.png"))

        plt.clf()
        mg.imshow("topographic__elevation", cmap="coolwarm")
        plt.title("Topographic elevation result (m)")
        plt.savefig(os.path.join(output_dir,"dem_hex_model_result.png"))
        plt.close('all')

        # progress / sanity output per time step
        print(f"loop {time_step}")
        print(diff_z.max(), diff_z.min())
        print(mg.at_node["total_deposit__thickness"].max(),
              mg.at_node["total_deposit__thickness"].min())

cookbooks/landlab/test.sh

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,3 +6,8 @@ set -e
66
cd mpi-hello-world
77
mpirun -n 2 python test.py
88
cd ..
9+
10+
# A first parallel landlab example
11+
cd mpi-landlab-test
12+
mpirun -n 2 python mpi_landlab3.py
13+
cd ..

0 commit comments

Comments
 (0)