Skip to content

Commit 748432a

Browse files
committed
Merge branch 'master' into neuralpint
2 parents af3215f + 0ef2b34 commit 748432a

File tree

19 files changed

+301
-175
lines changed

19 files changed

+301
-175
lines changed

CHANGELOG.md

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,10 @@
22

33
:arrow_left: [Back to main page](./README.md)
44

5+
- April 11, 2025: Version 5.6 adds a framework for MPI-parallel I/O, developed by [\@tlunet](https://github.com/tlunet), making it easier to visualize the data obtained with pySDC on HPC machines in software such as ParaView.
6+
Also, pySDC is now compatible with the finite element library [Firedrake](https://github.com/firedrakeproject/firedrake) and the geophysical fluid dynamics library
7+
[Gusto](https://github.com/firedrakeproject/gusto), thanks to [\@jshipton](https://github.com/jshipton) and [\@brownbaerchen](https://github.com/brownbaerchen).
8+
The former allows one to set up PDEs with finite element discretizations in pySDC and then solve them in time with SDC and PFASST, while the latter allows one to set up a geophysical fluid dynamics problem and then use pySDC with any SDC setup as a timestepper in Gusto.
59
- June 24, 2024: Major summer cleanup with Version 5.5. [\@tlunet](https://github.com/tlunet) extracted all quadrature-related stuff into his new standalone code
610
[qmat](https://github.com/Parallel-in-Time/qmat), which makes pySDC much more focussed and both parts easier to maintain.
711
[\@lisawim](https://github.com/lisawim) worked a lot on the DAE sweepers (including an MPI-parallel version), while [\@brownbaerchen](https://github.com/brownbaerchen) has fun with GPUs.

CITATION.cff

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -28,11 +28,15 @@ authors:
2828
given-names: Jakob
2929
orcid: https://orcid.org/0000-0001-6280-8396
3030
affiliation: "Jülich Supercomputing Centre, Forschungszentrum Jülich GmbH, 52425 Jülich, Germany"
31+
- family-names: Shipton
32+
given-names: Jemma
33+
orcid: https://orcid.org/0000-0002-8635-0831
34+
affiliation: "Department of Mathematics and Statistics, University of Exeter, Exeter, UK"
3135

3236

33-
version: 5.5.2
37+
version: 5.6
3438
doi: 10.5281/zenodo.594191
35-
date-released: 2024-09-23
39+
date-released: 2025-04-11
3640
keywords:
3741
- "parallel-in-time"
3842
- "spectral deferred corrections"

docs/source/conf.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -70,9 +70,9 @@
7070
# built documents.
7171
#
7272
# The short X.Y version.
73-
version = '5.5'
73+
version = '5.6'
7474
# The full version, including alpha/beta/rc tags.
75-
release = '5.5.2'
75+
release = '5.6'
7676

7777
# The language for content autogenerated by Sphinx. Refer to documentation
7878
# for a list of supported languages.

etc/environment-mpi4py.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,3 +14,4 @@ dependencies:
1414
- pip
1515
- pip:
1616
- qmat>=0.1.8
17+
- pytest-isolate-mpi

pySDC/core/problem.py

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -80,6 +80,18 @@ def f_init(self):
8080
def get_default_sweeper_class(cls):
8181
raise NotImplementedError(f'No default sweeper class implemented for {cls} problem!')
8282

83+
def setUpFieldsIO(self):
84+
"""
85+
Set up FieldsIO for MPI with the space decomposition of this problem
86+
"""
87+
pass
88+
89+
def getOutputFile(self, fileName):
90+
raise NotImplementedError(f'No output file implemented for {type(self).__name__}')
91+
92+
def processSolutionForOutput(self, u):
93+
return u
94+
8395
def eval_f(self, u, t):
8496
"""
8597
Abstract interface to RHS computation of the ODE

pySDC/helpers/fieldsIO.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -203,9 +203,10 @@ def initialize(self):
203203
assert not self.initialized, "FieldsIO already initialized"
204204

205205
if not self.ALLOW_OVERWRITE:
206-
assert not os.path.isfile(
207-
self.fileName
208-
), f"file {self.fileName!r} already exists, use FieldsIO.ALLOW_OVERWRITE = True to allow overwriting"
206+
if os.path.isfile(self.fileName):
207+
raise FileExistsError(
208+
f"file {self.fileName!r} already exists, use FieldsIO.ALLOW_OVERWRITE = True to allow overwriting"
209+
)
209210

210211
with open(self.fileName, "w+b") as f:
211212
self.hBase.tofile(f)

pySDC/helpers/spectral_helper.py

Lines changed: 43 additions & 121 deletions
Original file line numberDiff line numberDiff line change
@@ -1044,7 +1044,7 @@ def remove_BC(self, component, equation, axis, kind, line=-1, scalar=False, **kw
10441044
"""
10451045
Remove a BC from the matrix. This is useful e.g. when you add a non-scalar BC and then need to selectively
10461046
remove single BCs again, as in incompressible Navier-Stokes, for instance.
1047-
Forward arguments for the boundary conditions using `kwargs`. Refer to documentation of 1D bases for details.
1047+
Forwards arguments for the boundary conditions using `kwargs`. Refer to documentation of 1D bases for details.
10481048
10491049
Args:
10501050
component (str): Name of the component the BC should act on
@@ -1311,15 +1311,15 @@ def get_wavenumbers(self):
13111311
"""
13121312
Get grid in spectral space
13131313
"""
1314-
grids = [self.axes[i].get_wavenumbers()[self.local_slice[i]] for i in range(len(self.axes))][::-1]
1315-
return self.xp.meshgrid(*grids)
1314+
grids = [self.axes[i].get_wavenumbers()[self.local_slice[i]] for i in range(len(self.axes))]
1315+
return self.xp.meshgrid(*grids, indexing='ij')
13161316

13171317
def get_grid(self):
13181318
"""
13191319
Get grid in physical space
13201320
"""
1321-
grids = [self.axes[i].get_1dgrid()[self.local_slice[i]] for i in range(len(self.axes))][::-1]
1322-
return self.xp.meshgrid(*grids)
1321+
grids = [self.axes[i].get_1dgrid()[self.local_slice[i]] for i in range(len(self.axes))]
1322+
return self.xp.meshgrid(*grids, indexing='ij')
13231323

13241324
def get_fft(self, axes=None, direction='object', padding=None, shape=None):
13251325
"""
@@ -1853,6 +1853,26 @@ def get_local_slice_of_1D_matrix(self, M, axis):
18531853
"""
18541854
return M.tocsc()[self.local_slice[axis], self.local_slice[axis]]
18551855

1856+
def expand_matrix_ND(self, matrix, aligned):
1857+
sp = self.sparse_lib
1858+
axes = np.delete(np.arange(self.ndim), aligned)
1859+
ndim = len(axes) + 1
1860+
1861+
if ndim == 1:
1862+
return matrix
1863+
elif ndim == 2:
1864+
axis = axes[0]
1865+
I1D = sp.eye(self.axes[axis].N)
1866+
1867+
mats = [None] * ndim
1868+
mats[aligned] = self.get_local_slice_of_1D_matrix(matrix, aligned)
1869+
mats[axis] = self.get_local_slice_of_1D_matrix(I1D, axis)
1870+
1871+
return sp.kron(*mats)
1872+
1873+
else:
1874+
raise NotImplementedError(f'Matrix expansion not implemented for {ndim} dimensions!')
1875+
18561876
def get_filter_matrix(self, axis, **kwargs):
18571877
"""
18581878
Get bandpass filter along `axis`. See the documentation `get_filter_matrix` in the 1D bases for what kwargs are
@@ -1878,31 +1898,10 @@ def get_differentiation_matrix(self, axes, **kwargs):
18781898
Returns:
18791899
sparse differentiation matrix
18801900
"""
1881-
sp = self.sparse_lib
1882-
ndim = self.ndim
1883-
1884-
if ndim == 1:
1885-
D = self.axes[0].get_differentiation_matrix(**kwargs)
1886-
elif ndim == 2:
1887-
for axis in axes:
1888-
axis2 = (axis + 1) % ndim
1889-
D1D = self.axes[axis].get_differentiation_matrix(**kwargs)
1890-
1891-
if len(axes) > 1:
1892-
I1D = sp.eye(self.axes[axis2].N)
1893-
else:
1894-
I1D = self.axes[axis2].get_Id()
1895-
1896-
mats = [None] * ndim
1897-
mats[axis] = self.get_local_slice_of_1D_matrix(D1D, axis)
1898-
mats[axis2] = self.get_local_slice_of_1D_matrix(I1D, axis2)
1899-
1900-
if axis == axes[0]:
1901-
D = sp.kron(*mats)
1902-
else:
1903-
D = D @ sp.kron(*mats)
1904-
else:
1905-
raise NotImplementedError(f'Differentiation matrix not implemented for {ndim} dimension!')
1901+
D = self.expand_matrix_ND(self.axes[axes[0]].get_differentiation_matrix(**kwargs), axes[0])
1902+
for axis in axes[1:]:
1903+
_D = self.axes[axis].get_differentiation_matrix(**kwargs)
1904+
D = D @ self.expand_matrix_ND(_D, axis)
19061905

19071906
return D
19081907

@@ -1916,31 +1915,10 @@ def get_integration_matrix(self, axes):
19161915
Returns:
19171916
sparse integration matrix
19181917
"""
1919-
sp = self.sparse_lib
1920-
ndim = len(self.axes)
1921-
1922-
if ndim == 1:
1923-
S = self.axes[0].get_integration_matrix()
1924-
elif ndim == 2:
1925-
for axis in axes:
1926-
axis2 = (axis + 1) % ndim
1927-
S1D = self.axes[axis].get_integration_matrix()
1928-
1929-
if len(axes) > 1:
1930-
I1D = sp.eye(self.axes[axis2].N)
1931-
else:
1932-
I1D = self.axes[axis2].get_Id()
1933-
1934-
mats = [None] * ndim
1935-
mats[axis] = self.get_local_slice_of_1D_matrix(S1D, axis)
1936-
mats[axis2] = self.get_local_slice_of_1D_matrix(I1D, axis2)
1937-
1938-
if axis == axes[0]:
1939-
S = sp.kron(*mats)
1940-
else:
1941-
S = S @ sp.kron(*mats)
1942-
else:
1943-
raise NotImplementedError(f'Integration matrix not implemented for {ndim} dimension!')
1918+
S = self.expand_matrix_ND(self.axes[axes[0]].get_integration_matrix(), axes[0])
1919+
for axis in axes[1:]:
1920+
_S = self.axes[axis].get_integration_matrix()
1921+
S = S @ self.expand_matrix_ND(_S, axis)
19441922

19451923
return S
19461924

@@ -1951,27 +1929,10 @@ def get_Id(self):
19511929
Returns:
19521930
sparse identity matrix
19531931
"""
1954-
sp = self.sparse_lib
1955-
ndim = self.ndim
1956-
I = sp.eye(np.prod(self.init[0][1:]), dtype=complex)
1957-
1958-
if ndim == 1:
1959-
I = self.axes[0].get_Id()
1960-
elif ndim == 2:
1961-
for axis in range(ndim):
1962-
axis2 = (axis + 1) % ndim
1963-
I1D = self.axes[axis].get_Id()
1964-
1965-
I1D2 = sp.eye(self.axes[axis2].N)
1966-
1967-
mats = [None] * ndim
1968-
mats[axis] = self.get_local_slice_of_1D_matrix(I1D, axis)
1969-
mats[axis2] = self.get_local_slice_of_1D_matrix(I1D2, axis2)
1970-
1971-
I = I @ sp.kron(*mats)
1972-
else:
1973-
raise NotImplementedError(f'Identity matrix not implemented for {ndim} dimension!')
1974-
1932+
I = self.expand_matrix_ND(self.axes[0].get_Id(), 0)
1933+
for axis in range(1, self.ndim):
1934+
_I = self.axes[axis].get_Id()
1935+
I = I @ self.expand_matrix_ND(_I, axis)
19751936
return I
19761937

19771938
def get_Dirichlet_recombination_matrix(self, axis=-1):
@@ -1984,26 +1945,8 @@ def get_Dirichlet_recombination_matrix(self, axis=-1):
19841945
Returns:
19851946
sparse matrix
19861947
"""
1987-
sp = self.sparse_lib
1988-
ndim = len(self.axes)
1989-
1990-
if ndim == 1:
1991-
C = self.axes[0].get_Dirichlet_recombination_matrix()
1992-
elif ndim == 2:
1993-
axis2 = (axis + 1) % ndim
1994-
C1D = self.axes[axis].get_Dirichlet_recombination_matrix()
1995-
1996-
I1D = self.axes[axis2].get_Id()
1997-
1998-
mats = [None] * ndim
1999-
mats[axis] = self.get_local_slice_of_1D_matrix(C1D, axis)
2000-
mats[axis2] = self.get_local_slice_of_1D_matrix(I1D, axis2)
2001-
2002-
C = sp.kron(*mats)
2003-
else:
2004-
raise NotImplementedError(f'Basis change matrix not implemented for {ndim} dimension!')
2005-
2006-
return C
1948+
C1D = self.axes[axis].get_Dirichlet_recombination_matrix()
1949+
return self.expand_matrix_ND(C1D, axis)
20071950

20081951
def get_basis_change_matrix(self, axes=None, **kwargs):
20091952
"""
@@ -2018,30 +1961,9 @@ def get_basis_change_matrix(self, axes=None, **kwargs):
20181961
"""
20191962
axes = tuple(-i - 1 for i in range(self.ndim)) if axes is None else axes
20201963

2021-
sp = self.sparse_lib
2022-
ndim = len(self.axes)
2023-
2024-
if ndim == 1:
2025-
C = self.axes[0].get_basis_change_matrix(**kwargs)
2026-
elif ndim == 2:
2027-
for axis in axes:
2028-
axis2 = (axis + 1) % ndim
2029-
C1D = self.axes[axis].get_basis_change_matrix(**kwargs)
2030-
2031-
if len(axes) > 1:
2032-
I1D = sp.eye(self.axes[axis2].N)
2033-
else:
2034-
I1D = self.axes[axis2].get_Id()
2035-
2036-
mats = [None] * ndim
2037-
mats[axis] = self.get_local_slice_of_1D_matrix(C1D, axis)
2038-
mats[axis2] = self.get_local_slice_of_1D_matrix(I1D, axis2)
2039-
2040-
if axis == axes[0]:
2041-
C = sp.kron(*mats)
2042-
else:
2043-
C = C @ sp.kron(*mats)
2044-
else:
2045-
raise NotImplementedError(f'Basis change matrix not implemented for {ndim} dimension!')
1964+
C = self.expand_matrix_ND(self.axes[axes[0]].get_basis_change_matrix(**kwargs), axes[0])
1965+
for axis in axes[1:]:
1966+
_C = self.axes[axis].get_basis_change_matrix(**kwargs)
1967+
C = C @ self.expand_matrix_ND(_C, axis)
20461968

20471969
return C

pySDC/implementations/hooks/log_solution.py

Lines changed: 63 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,8 @@
22
import pickle
33
import os
44
import numpy as np
5+
from pySDC.helpers.fieldsIO import FieldsIO
6+
from pySDC.core.errors import DataError
57

68

79
class LogSolution(Hooks):
@@ -68,7 +70,7 @@ def post_iteration(self, step, level_number):
6870
)
6971

7072

71-
class LogToFile(Hooks):
73+
class LogToPickleFile(Hooks):
7274
r"""
7375
Hook for logging the solution to file after the step using pickle.
7476
@@ -171,7 +173,7 @@ def load(cls, index):
171173
return pickle.load(file)
172174

173175

174-
class LogToFileAfterXs(LogToFile):
176+
class LogToPickleFileAfterXS(LogToPickleFile):
175177
r'''
176178
Log to file after certain amount of time has passed instead of after every step
177179
'''
@@ -200,3 +202,62 @@ def process_solution(L):
200202
}
201203

202204
self.log_to_file(step, level_number, type(self).logging_condition(L), process_solution=process_solution)
205+
206+
207+
class LogToFile(Hooks):
208+
filename = 'myRun.pySDC'
209+
time_increment = 0
210+
allow_overwriting = False
211+
212+
def __init__(self):
213+
super().__init__()
214+
self.outfile = None
215+
self.t_next_log = 0
216+
FieldsIO.ALLOW_OVERWRITE = self.allow_overwriting
217+
218+
def pre_run(self, step, level_number):
219+
if level_number > 0:
220+
return None
221+
L = step.levels[level_number]
222+
223+
# setup outfile
224+
if os.path.isfile(self.filename) and L.time > 0:
225+
L.prob.setUpFieldsIO()
226+
self.outfile = FieldsIO.fromFile(self.filename)
227+
self.logger.info(
228+
f'Set up file {self.filename!r} for writing output. This file already contains solutions up to t={self.outfile.times[-1]:.4f}.'
229+
)
230+
else:
231+
self.outfile = L.prob.getOutputFile(self.filename)
232+
self.logger.info(f'Set up file {self.filename!r} for writing output.')
233+
234+
# write initial conditions
235+
if L.time not in self.outfile.times:
236+
self.outfile.addField(time=L.time, field=L.prob.processSolutionForOutput(L.u[0]))
237+
self.logger.info(f'Written initial conditions at t={L.time:.4f} to file')
238+
239+
def post_step(self, step, level_number):
240+
if level_number > 0:
241+
return None
242+
243+
L = step.levels[level_number]
244+
245+
if self.t_next_log == 0:
246+
self.t_next_log = L.time + self.time_increment
247+
248+
if L.time + L.dt >= self.t_next_log and not step.status.restart:
249+
value_exists = True in [abs(me - (L.time + L.dt)) < np.finfo(float).eps * 1000 for me in self.outfile.times]
250+
if value_exists and not self.allow_overwriting:
251+
raise DataError(f'Already have recorded data for time {L.time + L.dt} in this file!')
252+
self.outfile.addField(time=L.time + L.dt, field=L.prob.processSolutionForOutput(L.uend))
253+
self.logger.info(f'Written solution at t={L.time+L.dt:.4f} to file')
254+
self.t_next_log = max([L.time + L.dt, self.t_next_log]) + self.time_increment
255+
256+
@classmethod
257+
def load(cls, index):
258+
data = {}
259+
file = FieldsIO.fromFile(cls.filename)
260+
file_entry = file.readField(idx=index)
261+
data['u'] = file_entry[1]
262+
data['t'] = file_entry[0]
263+
return data

0 commit comments

Comments
 (0)