
Commit af3215f

Merge branch 'master' into neuralpint
2 parents 0da6851 + 4116ae3

7 files changed: +84 −37 lines

.github/workflows/ci_pipeline.yml
Lines changed: 21 additions & 9 deletions

@@ -173,14 +173,19 @@ jobs:
   user_firedrake_tests:
     runs-on: ubuntu-latest
     container:
-      image: firedrakeproject/firedrake-vanilla:latest
+      image: firedrakeproject/firedrake-vanilla-default:latest
       options: --user root
       volumes:
         - ${{ github.workspace }}:/repositories
     defaults:
       run:
         shell: bash -l {0}
     steps:
+      - name: Fix HOME
+        # For unknown reasons GitHub actions overwrite HOME to /github/home
+        # which will break everything unless fixed
+        # (https://github.com/actions/runner/issues/863)
+        run: echo "HOME=/home/firedrake" >> "$GITHUB_ENV"
       - name: Checkout pySDC
         uses: actions/checkout@v4
         with:
@@ -190,25 +195,32 @@ jobs:
         with:
           repository: firedrakeproject/gusto
           path: ./gusto_repo
+      - name: Create virtual environment
+        # pass '--system-site-packages' so Firedrake can be found
+        run: python3 -m venv --system-site-packages venv-pySDC
+
       - name: Install pySDC
         run: |
-          . /home/firedrake/firedrake/bin/activate
-          python -m pip install --no-deps -e /repositories/pySDC
-          python -m pip install qmat
+          . venv-pySDC/bin/activate
+          pip install -e /repositories/pySDC
+          pip install qmat
+          # test installation
+          python -c "import pySDC; print(f'pySDC module: {pySDC}')"
       - name: Install gusto
         run: |
-          . /home/firedrake/firedrake/bin/activate
-          python -m pip install -e /repositories/gusto_repo
+          . venv-pySDC/bin/activate
+          pip install -e /repositories/gusto_repo
+          # test installation
+          python -c "import gusto; print(f'gusto module: {gusto}')"
       - name: run pytest
         run: |
-          . /home/firedrake/firedrake/bin/activate
+          . venv-pySDC/bin/activate
           firedrake-clean
           cd ./pySDC
-          coverage run -m pytest --continue-on-collection-errors -v --durations=0 /repositories/pySDC/pySDC/tests -m firedrake
+          python -m coverage run -m pytest --continue-on-collection-errors -v --durations=0 /repositories/pySDC/pySDC/tests -m firedrake
         timeout-minutes: 45
       - name: Make coverage report
         run: |
-          . /home/firedrake/firedrake/bin/activate

           cd ./pySDC
           mv data ../data_firedrake
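As a quick local check of the new setup, a venv created with `--system-site-packages` sees both its own editable installs and the container's system packages; the "test installation" lines above do exactly this. A minimal sketch, assuming it is run inside the activated `venv-pySDC` environment of the Firedrake container (the printed paths are illustrative):

    # Check that Firedrake (from the system site-packages) and pySDC (editable
    # install inside the venv) both resolve from the same interpreter.
    import firedrake
    import pySDC

    print(f"firedrake module: {firedrake.__name__} from {firedrake.__file__}")
    print(f"pySDC module:     {pySDC.__name__} from {pySDC.__file__}")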

pySDC/helpers/fieldsIO.py
Lines changed: 48 additions & 13 deletions

@@ -45,9 +45,7 @@
 Warning
 -------
 To use MPI collective writing, you need to call first the class methods :class:`Rectilinear.initMPI` (cf their docstring).
-Also, `Rectilinear.setHeader` **must be given the global grids coordinates**, wether the code is run in parallel or not.
-
-> ⚠️ Also : this module can only be imported with **Python 3.11 or higher** !
+Also, `Rectilinear.setHeader` **must be given the global grids coordinates**, whether the code is run in parallel or not.
 """
 import os
 import numpy as np
@@ -207,7 +205,7 @@ def initialize(self):
         if not self.ALLOW_OVERWRITE:
             assert not os.path.isfile(
                 self.fileName
-            ), "file already exists, use FieldsIO.ALLOW_OVERWRITE = True to allow overwriting"
+            ), f"file {self.fileName!r} already exists, use FieldsIO.ALLOW_OVERWRITE = True to allow overwriting"

         with open(self.fileName, "w+b") as f:
             self.hBase.tofile(f)
@@ -480,7 +478,7 @@ def toVTR(self, baseName, varNames, idxFormat="{:06d}"):

         Example
         -------
-        >>> # Suppose the FieldsIO object is already writen into outputs.pysdc
+        >>> # Suppose the FieldsIO object is already written into outputs.pysdc
         >>> import os
         >>> from pySDC.utils.fieldsIO import Rectilinear
         >>> os.makedirs("vtrFiles") # to store all VTR files into a subfolder
@@ -499,12 +497,13 @@ def toVTR(self, baseName, varNames, idxFormat="{:06d}"):
     # MPI-parallel implementation
     # -------------------------------------------------------------------------
     comm: MPI.Intracomm = None
+    _nCollectiveIO = None

     @classmethod
     def setupMPI(cls, comm: MPI.Intracomm, iLoc, nLoc):
         """
         Setup the MPI mode for the files IO, considering a decomposition
-        of the 1D grid into contiuous subintervals.
+        of the 1D grid into contiguous subintervals.

         Parameters
         ----------
@@ -519,6 +518,20 @@ def setupMPI(cls, comm: MPI.Intracomm, iLoc, nLoc):
         cls.iLoc = iLoc
         cls.nLoc = nLoc
         cls.mpiFile: MPI.File = None
+        cls._nCollectiveIO = None
+
+    @property
+    def nCollectiveIO(self):
+        """
+        Number of collective IO operations over all processes, when reading or writing a field.
+
+        Returns:
+        --------
+        int: Number of collective IO accesses
+        """
+        if self._nCollectiveIO is None:
+            self._nCollectiveIO = self.comm.allreduce(self.nVar * np.prod(self.nLoc[:-1]), op=MPI.MAX)
+        return self._nCollectiveIO

     @property
     def MPI_ON(self):
@@ -546,7 +559,7 @@ def MPI_WRITE(self, data):
         """Write data (np.ndarray) in the binary file in MPI mode, at the current file cursor position."""
         self.mpiFile.Write(data)

-    def MPI_WRITE_AT(self, offset, data: np.ndarray):
+    def MPI_WRITE_AT_ALL(self, offset, data: np.ndarray):
         """
         Write data in the binary file in MPI mode, with a given offset
         **relative to the beginning of the file**.
@@ -560,7 +573,7 @@ def MPI_WRITE_AT(self, offset, data: np.ndarray):
         """
         self.mpiFile.Write_at_all(offset, data)

-    def MPI_READ_AT(self, offset, data):
+    def MPI_READ_AT_ALL(self, offset, data: np.ndarray):
         """
         Read data from the binary file in MPI mode, with a given offset
         **relative to the beginning of the file**.
@@ -625,13 +638,22 @@ def addField(self, time, field):

         offset0 = self.fileSize
         self.MPI_FILE_OPEN(mode="a")
+        nWrites = 0
+        nCollectiveIO = self.nCollectiveIO
+
         if self.MPI_ROOT:
             self.MPI_WRITE(np.array(time, dtype=T_DTYPE))
         offset0 += self.tSize

         for (iVar, *iBeg) in itertools.product(range(self.nVar), *[range(n) for n in self.nLoc[:-1]]):
             offset = offset0 + self.iPos(iVar, iBeg) * self.itemSize
-            self.MPI_WRITE_AT(offset, field[iVar, *iBeg])
+            self.MPI_WRITE_AT_ALL(offset, field[(iVar, *iBeg)])
+            nWrites += 1
+
+        for _ in range(nCollectiveIO - nWrites):
+            # Additional collective write to catch up with other processes
+            self.MPI_WRITE_AT_ALL(offset0, field[:0])
+
         self.MPI_FILE_CLOSE()

     def iPos(self, iVar, iX):
@@ -674,9 +696,18 @@ def readField(self, idx):
         field = np.empty((self.nVar, *self.nLoc), dtype=self.dtype)

         self.MPI_FILE_OPEN(mode="r")
+        nReads = 0
+        nCollectiveIO = self.nCollectiveIO
+
         for (iVar, *iBeg) in itertools.product(range(self.nVar), *[range(n) for n in self.nLoc[:-1]]):
             offset = offset0 + self.iPos(iVar, iBeg) * self.itemSize
-            self.MPI_READ_AT(offset, field[iVar, *iBeg])
+            self.MPI_READ_AT_ALL(offset, field[(iVar, *iBeg)])
+            nReads += 1
+
+        for _ in range(nCollectiveIO - nReads):
+            # Additional collective read to catch up with other processes
+            self.MPI_READ_AT_ALL(offset0, field[:0])
+
         self.MPI_FILE_CLOSE()

         return t, field
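The nWrites/nReads bookkeeping above exists because `Write_at_all` / `Read_at_all` are collective MPI-IO operations: every rank in the communicator must take part in every call, even when its local domain contributes fewer blocks than another rank's. The new `nCollectiveIO` property agrees on the maximum number of accesses via an `allreduce`, and ranks with fewer blocks then issue empty catch-up calls. A self-contained sketch of the same pattern with plain mpi4py (the file name `demo_collective.bin` and the block layout are made up for this example):

    # Run with e.g.: mpiexec -np 3 python collective_io_sketch.py
    import numpy as np
    from mpi4py import MPI

    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()

    # Uneven decomposition: rank r owns r+1 blocks of 4 doubles each.
    myBlocks = [np.full(4, rank, dtype="float64") for _ in range(rank + 1)]

    # Every rank must issue the same number of collective calls -> agree on the max.
    nCollectiveIO = comm.allreduce(len(myBlocks), op=MPI.MAX)

    # Number of blocks owned by lower ranks (exscan returns None on rank 0).
    firstBlock = comm.exscan(len(myBlocks), op=MPI.SUM) or 0
    itemSize = np.dtype("float64").itemsize

    fh = MPI.File.Open(comm, "demo_collective.bin", MPI.MODE_WRONLY | MPI.MODE_CREATE)
    for i, block in enumerate(myBlocks):
        offset = (firstBlock + i) * block.size * itemSize
        fh.Write_at_all(offset, block)  # collective write of one block

    for _ in range(nCollectiveIO - len(myBlocks)):
        # Empty catch-up writes so all ranks reach the same call count.
        fh.Write_at_all(0, np.empty(0, dtype="float64"))
    fh.Close()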
@@ -689,7 +720,7 @@ def initGrid(nVar, gridSizes):
     dim = len(gridSizes)
     coords = [np.linspace(0, 1, num=n, endpoint=False) for n in gridSizes]
     s = [None] * dim
-    u0 = np.array(np.arange(nVar) + 1)[:, *s]
+    u0 = np.array(np.arange(nVar) + 1)[(slice(None), *s)]
     for x in np.meshgrid(*coords, indexing="ij"):
         u0 = u0 * x
     return coords, u0
@@ -711,8 +742,7 @@ def writeFields_MPI(fileName, dtypeIdx, algo, nSteps, nVar, gridSizes):
     iLoc, nLoc = blocks.localBounds
     Rectilinear.setupMPI(comm, iLoc, nLoc)
     s = [slice(i, i + n) for i, n in zip(iLoc, nLoc)]
-    u0 = u0[:, *s]
-    print(MPI_RANK, u0.shape)
+    u0 = u0[(slice(None), *s)]

     f1 = Rectilinear(DTYPES[dtypeIdx], fileName)
     f1.setHeader(nVar=nVar, coords=coords)
@@ -731,6 +761,11 @@ def writeFields_MPI(fileName, dtypeIdx, algo, nSteps, nVar, gridSizes):
 def compareFields_MPI(fileName, u0, nSteps):
     from pySDC.helpers.fieldsIO import FieldsIO

+    comm = MPI.COMM_WORLD
+    MPI_RANK = comm.Get_rank()
+    if MPI_RANK == 0:
+        print("Comparing fields with MPI")
+
     f2 = FieldsIO.fromFile(fileName)

     times = np.arange(nSteps) / nSteps
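The indexing rewrite in `initGrid`, `writeFields_MPI`, `addField` and `readField` (for instance `u0[:, *s]` becoming `u0[(slice(None), *s)]`) replaces star-unpacking inside a subscript, which is only valid syntax on Python 3.11 and newer, with an explicit index tuple that parses on older interpreters as well; this goes together with dropping the "Python 3.11 or higher" warning from the module docstring. A small sketch of the equivalence (array shape and slices chosen arbitrarily):

    import numpy as np

    arr = np.arange(24).reshape(2, 3, 4)
    s = [slice(1, 3), slice(0, 2)]

    # Explicit index tuple: valid syntax on any Python version supported by NumPy.
    sub = arr[(slice(None), *s)]
    assert sub.shape == (2, 2, 2)

    # Equivalent form that requires Python >= 3.11 (star-unpacking in a subscript):
    #     sub = arr[:, *s]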

pySDC/tests/test_helpers/test_fieldsIO.py
Lines changed: 7 additions & 7 deletions

@@ -3,9 +3,6 @@
 import glob
 import pytest

-if sys.version_info < (3, 11):
-    pytest.skip("skipping fieldsIO tests on python lower than 3.11", allow_module_level=True)
-
 import itertools
 import numpy as np

@@ -14,6 +11,7 @@
 FieldsIO.ALLOW_OVERWRITE = True


+@pytest.mark.base
 @pytest.mark.parametrize("dtypeIdx", DTYPES.keys())
 @pytest.mark.parametrize("dim", range(4))
 def testHeader(dim, dtypeIdx):
@@ -65,6 +63,7 @@ def testHeader(dim, dtypeIdx):
     assert np.allclose(val, f2.header[key]), f"header's discrepancy for {key} in written {f2}"


+@pytest.mark.base
 @pytest.mark.parametrize("dtypeIdx", DTYPES.keys())
 @pytest.mark.parametrize("nSteps", [1, 2, 10, 100])
 @pytest.mark.parametrize("nVar", [1, 2, 5])
@@ -106,6 +105,7 @@ def testScalar(nVar, nSteps, dtypeIdx):
     assert np.allclose(u2, u1), f"{idx}'s fields in {f1} has incorrect values"


+@pytest.mark.base
 @pytest.mark.parametrize("dtypeIdx", DTYPES.keys())
 @pytest.mark.parametrize("nSteps", [1, 2, 5, 10])
 @pytest.mark.parametrize("nVar", [1, 2, 5])
@@ -155,6 +155,7 @@ def testRectilinear(dim, nVar, nSteps, dtypeIdx):
     assert np.allclose(u2, u1), f"{idx}'s fields in {f1} has incorrect values"


+@pytest.mark.base
 @pytest.mark.parametrize("nSteps", [1, 10])
 @pytest.mark.parametrize("nZ", [1, 5, 16])
 @pytest.mark.parametrize("nY", [1, 5, 16])
@@ -249,8 +250,7 @@ def testRectilinear_MPI(dim, nProcs, dtypeIdx, algo, nSteps, nVar):
     parser.add_argument('--gridSizes', type=int, nargs='+', help="number of grid points in each dimensions")
     args = parser.parse_args()

-    if sys.version_info >= (3, 11):
-        from pySDC.helpers.fieldsIO import writeFields_MPI, compareFields_MPI
+    from pySDC.helpers.fieldsIO import writeFields_MPI, compareFields_MPI

-        u0 = writeFields_MPI(**args.__dict__)
-        compareFields_MPI(args.fileName, u0, args.nSteps)
+    u0 = writeFields_MPI(**args.__dict__)
+    compareFields_MPI(args.fileName, u0, args.nSteps)
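The new `@pytest.mark.base` markers put these tests into the suite selected by marker expression, in the same way the Firedrake CI job above runs pytest with `-m firedrake`. A minimal illustration of marker-based selection (file and test names here are hypothetical; the marker names mirror the ones used in this repository and would normally be registered in the pytest configuration):

    import pytest

    @pytest.mark.base
    def test_runs_without_extras():
        assert 1 + 1 == 2

    @pytest.mark.firedrake
    def test_needs_firedrake():
        firedrake = pytest.importorskip("firedrake")  # skip if Firedrake is absent
        assert firedrake is not None

    # Selection on the command line, for example:
    #   pytest -m base        -> only tests marked "base"
    #   pytest -m firedrake   -> only tests marked "firedrake", as in the CI job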

pySDC/tests/test_helpers/test_gusto_coupling.py
Lines changed: 5 additions & 5 deletions

@@ -165,7 +165,7 @@ def test_generic_gusto_problem(setup):
     error = abs(un_forward - un_ref) / abs(un_ref)

     assert (
-        error < np.finfo(float).eps * 1e2
+        error < np.finfo(float).eps * 1e4
     ), f'Forward Euler does not match reference implementation! Got relative difference of {error}'

     # test backward Euler step
@@ -326,7 +326,7 @@ def run(stepper, n_steps):
     print(error)

     assert (
-        error < solver_parameters['snes_rtol'] * 1e3
+        error < solver_parameters['snes_rtol'] * 1e4
     ), f'pySDC and Gusto differ in method {method}! Got relative difference of {error}'


@@ -449,7 +449,7 @@ def run(stepper, n_steps):
     print(error)

     assert (
-        error < solver_parameters['snes_rtol'] * 1e3
+        error < solver_parameters['snes_rtol'] * 1e4
     ), f'pySDC and Gusto differ in SDC! Got relative difference of {error}'


@@ -633,7 +633,7 @@ def test_pySDC_integrator_MSSDC(n_steps, useMPIController, setup, submit=True, n
     my_env = os.environ.copy()
     my_env['COVERAGE_PROCESS_START'] = 'pyproject.toml'
     cwd = '.'
-    cmd = f'mpiexec -np {n_tasks} python {__file__} --test=MSSDC --n_steps={n_steps}'.split()
+    cmd = f'mpiexec -np {n_tasks} --oversubscribe python {__file__} --test=MSSDC --n_steps={n_steps}'.split()

     p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=my_env, cwd=cwd)
     p.wait()
@@ -762,7 +762,7 @@ def test_pySDC_integrator_MSSDC(n_steps, useMPIController, setup, submit=True, n
     print(error)

     assert (
-        error < solver_parameters['snes_rtol'] * 1e3
+        error < solver_parameters['snes_rtol'] * 1e4
     ), f'pySDC and Gusto differ in method {method}! Got relative difference of {error}'

pySDC/tests/test_tutorials/test_step_7.py
Lines changed: 1 addition & 1 deletion

@@ -143,7 +143,7 @@ def test_E_MPI():
     my_env['COVERAGE_PROCESS_START'] = 'pyproject.toml'
     cwd = '.'
     num_procs = 3
-    cmd = f'mpiexec -np {num_procs} python pySDC/tutorial/step_7/E_pySDC_with_Firedrake.py --useMPIsweeper'.split()
+    cmd = f'mpiexec -np {num_procs} --oversubscribe python pySDC/tutorial/step_7/E_pySDC_with_Firedrake.py --useMPIsweeper'.split()

     p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=my_env, cwd=cwd)
     p.wait()

pySDC/tutorial/step_7/E_pySDC_with_Firedrake.py
Lines changed: 1 addition & 1 deletion

@@ -170,7 +170,7 @@ def runHeatFiredrake(useMPIsweeper=False, ML=False):

     # do tests that we got the same as last time
     n_nodes = 1 if useMPIsweeper else description['sweeper_params']['num_nodes']
-    assert error[0][1] < 2e-8
+    assert error[0][1] < 2e-7
     assert tot_iter == 10 if ML else 29
     assert tot_solver_setup == n_nodes
     assert tot_solves == n_nodes * tot_iter

pySDC/tutorial/step_7/F_pySDC_with_Gusto.py
Lines changed: 1 addition & 1 deletion

@@ -155,7 +155,7 @@ def williamson_5(
     lamda, phi, _ = lonlatr_from_xyz(x, y, z)

     # Equation: coriolis
-    parameters = ShallowWaterParameters(H=mean_depth, g=g)
+    parameters = ShallowWaterParameters(mesh, H=mean_depth, g=g)
     Omega = parameters.Omega
     fexpr = 2 * Omega * z / radius

