
Commit 2795143

TL: refactoring following Thomas's suggestions
1 parent: 7bac672

File tree: 3 files changed, +32 -32 lines


pySDC/helpers/fieldsIO.py (+18 -18)
@@ -417,7 +417,7 @@ def setHeader(self, nVar, coords):
     @property
     def hInfos(self):
         """Array representing the grid structure to be written in the binary file."""
-        return [np.array([self.nVar, self.dim, *self.nX], dtype=np.int32)] + [
+        return [np.array([self.nVar, self.dim, *self.gridSizes], dtype=np.int32)] + [
             np.array(coord, dtype=np.float64) for coord in self.header["coords"]
         ]

@@ -431,31 +431,31 @@ def readHeader(self, f):
             File to read the header from.
         """
         nVar, dim = np.fromfile(f, dtype=np.int32, count=2)
-        nX = np.fromfile(f, dtype=np.int32, count=dim)
-        coords = [np.fromfile(f, dtype=np.float64, count=n) for n in nX]
+        gridSizes = np.fromfile(f, dtype=np.int32, count=dim)
+        coords = [np.fromfile(f, dtype=np.float64, count=n) for n in gridSizes]
         self.setHeader(nVar, coords)

     def reshape(self, fields: np.ndarray):
         """Reshape the fields to a N-d array (inplace operation)"""
-        fields.shape = (self.nVar, *self.nX)
+        fields.shape = (self.nVar, *self.gridSizes)

     # -------------------------------------------------------------------------
     # Class specifics
     # -------------------------------------------------------------------------
     @property
-    def nX(self):
+    def gridSizes(self):
         """Number of points in each grid dimension"""
         return [coord.size for coord in self.header["coords"]]

     @property
     def dim(self):
         """Number of grid dimensions"""
-        return len(self.nX)
+        return len(self.gridSizes)

     @property
     def nDoF(self):
         """Number of degrees of freedom for one variable"""
-        return np.prod(self.nX)
+        return np.prod(self.gridSizes)

     def toVTR(self, baseName, varNames, suffix="{:06d}_t={:1.2f}s"):
         """
@@ -625,22 +625,22 @@ def addField(self, time, field):
             self.MPI_WRITE(np.array(time, dtype=T_DTYPE))
         offset0 += self.tSize

-        for (iVar, *iX) in itertools.product(range(self.nVar), *[range(nX) for nX in self.nLoc[:-1]]):
-            offset = offset0 + self.iPos(iVar, iX) * self.itemSize
-            self.MPI_WRITE_AT(offset, field[iVar, *iX])
+        for (iVar, *iBeg) in itertools.product(range(self.nVar), *[range(n) for n in self.nLoc[:-1]]):
+            offset = offset0 + self.iPos(iVar, iBeg) * self.itemSize
+            self.MPI_WRITE_AT(offset, field[iVar, *iBeg])
         self.MPI_FILE_CLOSE()

     def iPos(self, iVar, iX):
         iPos = iVar * self.nDoF
         for axis in range(self.dim - 1):
-            iPos += (self.iLoc[axis] + iX[axis]) * np.prod(self.nX[axis + 1 :])
+            iPos += (self.iLoc[axis] + iX[axis]) * np.prod(self.gridSizes[axis + 1 :])
         iPos += self.iLoc[-1]
         return iPos

     def readField(self, idx):
         """
         Read one field stored in the binary file, corresponding to the given
-        time index, eventually in MPI mode.
+        time index, using MPI if a space parallel decomposition is used.

         Parameters
         ----------
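Note: iPos maps (iVar, local indices on all but the last axis) to a global flat C-order offset; the last axis is contiguous on disk, so each MPI call reads or writes a whole line of the local block. A minimal sketch with hypothetical sizes, cross-checked against np.ravel_multi_index:

    import numpy as np

    gridSizes = [8, 8, 8]   # hypothetical global grid
    iLoc = [2, 4, 0]        # hypothetical offset of this rank's block
    nDoF = np.prod(gridSizes)

    def iPosSketch(iVar, iX):
        # same arithmetic as Rectilinear.iPos, without the class state
        iPos = iVar * nDoF
        for axis in range(len(gridSizes) - 1):
            iPos += (iLoc[axis] + iX[axis]) * np.prod(gridSizes[axis + 1:])
        return iPos + iLoc[-1]

    iVar, iX = 1, [3, 1]
    expected = iVar * nDoF + np.ravel_multi_index(
        (iLoc[0] + iX[0], iLoc[1] + iX[1], iLoc[2]), gridSizes)
    assert iPosSketch(iVar, iX) == expected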
@@ -670,9 +670,9 @@ def readField(self, idx):
         field = np.empty((self.nVar, *self.nLoc), dtype=self.dtype)

         self.MPI_FILE_OPEN(mode="r")
-        for (iVar, *iX) in itertools.product(range(self.nVar), *[range(nX) for nX in self.nLoc[:-1]]):
-            offset = offset0 + self.iPos(iVar, iX) * self.itemSize
-            self.MPI_READ_AT(offset, field[iVar, *iX])
+        for (iVar, *iBeg) in itertools.product(range(self.nVar), *[range(n) for n in self.nLoc[:-1]]):
+            offset = offset0 + self.iPos(iVar, iBeg) * self.itemSize
+            self.MPI_READ_AT(offset, field[iVar, *iBeg])
         self.MPI_FILE_CLOSE()

         return t, field
@@ -691,8 +691,8 @@ def initGrid(nVar, gridSizes):
     return coords, u0


-def writeFields_MPI(fileName, dtypeIdx, algo, nSteps, nVar, nX):
-    coords, u0 = initGrid(nVar, nX)
+def writeFields_MPI(fileName, dtypeIdx, algo, nSteps, nVar, gridSizes):
+    coords, u0 = initGrid(nVar, gridSizes)

     from mpi4py import MPI
     from pySDC.helpers.blocks import BlockDecomposition
@@ -702,7 +702,7 @@ def writeFields_MPI(fileName, dtypeIdx, algo, nSteps, nVar, nX):
     MPI_SIZE = comm.Get_size()
     MPI_RANK = comm.Get_rank()

-    blocks = BlockDecomposition(MPI_SIZE, nX, algo, MPI_RANK)
+    blocks = BlockDecomposition(MPI_SIZE, gridSizes, algo, MPI_RANK)

     iLoc, nLoc = blocks.localBounds
     Rectilinear.setupMPI(comm, iLoc, nLoc)
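Note: after the rename, user code queries the grid sizes through the gridSizes property instead of nX. A minimal serial usage sketch following the calls exercised in this diff (file name and values illustrative; np.float64 is assumed to be a supported dtype):

    import numpy as np
    from pySDC.helpers.fieldsIO import Rectilinear

    coords = [np.linspace(0, 1, num=n, endpoint=False) for n in (16, 32)]
    f1 = Rectilinear(np.float64, "example.pysdc")
    f1.setHeader(nVar=2, coords=coords)
    print(f1.dim, f1.gridSizes, f1.nDoF)  # -> 2 [16, 32] 512

    f1.initialize()
    u0 = np.random.rand(2, 16, 32)
    f1.addField(0.0, u0)                  # store one field at t=0

    t, u = f1.readField(0)                # read it back
    assert np.allclose(u, u0)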

pySDC/helpers/vtkIO.py (+3 -3)
@@ -94,16 +94,16 @@ def readFromVTR(fileName: str):
     reader.Update()

     vtr = reader.GetOutput()
-    dims = vtr.GetDimensions()
-    assert len(dims) == 3, "can only read 3D data"
+    gridSizes = vtr.GetDimensions()
+    assert len(gridSizes) == 3, "can only read 3D data"

     def vect(x):
         return numpy_support.vtk_to_numpy(x)

     coords = [vect(vtr.GetXCoordinates()), vect(vtr.GetYCoordinates()), vect(vtr.GetZCoordinates())]
     pointData = vtr.GetPointData()
     varNames = [pointData.GetArrayName(i) for i in range(pointData.GetNumberOfArrays())]
-    data = [numpy_support.vtk_to_numpy(pointData.GetArray(name)).reshape(dims, order="F") for name in varNames]
+    data = [numpy_support.vtk_to_numpy(pointData.GetArray(name)).reshape(gridSizes, order="F") for name in varNames]
     data = np.array(data)

     return data, coords, varNames
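Note: readFromVTR still returns the same (data, coords, varNames) triple; only the local variable was renamed. A short usage sketch (the file name is illustrative, following the default toVTR suffix pattern):

    from pySDC.helpers.vtkIO import readFromVTR

    # data has shape (nVars, *gridSizes); coords are the three 1D axes
    data, coords, varNames = readFromVTR("example_000000_t=0.00s.vtr")
    print(varNames, data.shape, [c.size for c in coords])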

pySDC/tests/test_helpers/test_fieldsIO.py (+11 -11)
@@ -116,20 +116,20 @@ def testRectilinear(dim, nVar, nSteps, dtypeIdx):
     fileName = f"testRectilinear{dim}D.pysdc"
     dtype = DTYPES[dtypeIdx]

-    for nX in itertools.product(*[[5, 10, 16]] * dim):
+    for gridSizes in itertools.product(*[[5, 10, 16]] * dim):

-        coords = [np.linspace(0, 1, num=n, endpoint=False) for n in nX]
+        coords = [np.linspace(0, 1, num=n, endpoint=False) for n in gridSizes]

         f1 = Rectilinear(dtype, fileName)
         f1.setHeader(nVar=nVar, coords=coords)

         assert f1.dim == dim, f"{f1} has incorrect dimension"
-        assert f1.nX == list(nX), f"{f1} has incorrect nX"
-        assert f1.nDoF == np.prod(nX), f"{f1} has incorrect nDOF"
-        assert f1.nItems == nVar * np.prod(nX), f"{f1} do not have nItems == nVar*nX**dim"
+        assert f1.gridSizes == list(gridSizes), f"{f1} has incorrect gridSizes"
+        assert f1.nDoF == np.prod(gridSizes), f"{f1} has incorrect nDOF"
+        assert f1.nItems == nVar * np.prod(gridSizes), f"{f1} does not have nItems == nVar*product(gridSizes)"

         f1.initialize()
-        u0 = np.random.rand(nVar, *nX).astype(f1.dtype)
+        u0 = np.random.rand(nVar, *gridSizes).astype(f1.dtype)
         times = np.arange(nSteps) / nSteps

         for t in times:
@@ -208,10 +208,10 @@ def testRectilinear_MPI(dim, nProcs, dtypeIdx, algo, nSteps, nVar):

     fileName = f"testRectilinear{dim}D_MPI.pysdc"

-    for nX in itertools.product(*[[61, 16]] * dim):
+    for gridSizes in itertools.product(*[[61, 16]] * dim):

         cmd = f"mpirun -np {nProcs} python {__file__} --fileName {fileName}"
-        cmd += f" --dtypeIdx {dtypeIdx} --algo {algo} --nSteps {nSteps} --nVar {nVar} --nX {' '.join([str(n) for n in nX])}"
+        cmd += f" --dtypeIdx {dtypeIdx} --algo {algo} --nSteps {nSteps} --nVar {nVar} --gridSizes {' '.join([str(n) for n in gridSizes])}"

         p = subprocess.Popen(cmd.split(), cwd=".")
         p.wait()
@@ -224,9 +224,9 @@ def testRectilinear_MPI(dim, nProcs, dtypeIdx, algo, nSteps, nVar):
         assert type(f2) == Rectilinear, f"incorrect type in MPI written fields {f2}"
         assert f2.nFields == nSteps, f"incorrect nFields in MPI written fields {f2} ({f2.nFields} instead of {nSteps})"
         assert f2.nVar == nVar, f"incorrect nVar in MPI written fields {f2}"
-        assert f2.nX == list(nX), f"incorrect nX in MPI written fields {f2}"
+        assert f2.gridSizes == list(gridSizes), f"incorrect gridSizes in MPI written fields {f2}"

-        coords, u0 = initGrid(nVar, nX)
+        coords, u0 = initGrid(nVar, gridSizes)
         for i, (cFile, cRef) in enumerate(zip(f2.header['coords'], coords)):
             assert np.allclose(cFile, cRef), f"incorrect coords[{i}] in MPI written fields {f2}"

@@ -248,7 +248,7 @@ def testRectilinear_MPI(dim, nProcs, dtypeIdx, algo, nSteps, nVar):
     parser.add_argument('--algo', type=str, help="algorithm used for block decomposition")
     parser.add_argument('--nSteps', type=int, help="number of time-steps")
     parser.add_argument('--nVar', type=int, help="number of field variables")
-    parser.add_argument('--nX', type=int, nargs='+', help="number of grid points in each dimensions")
+    parser.add_argument('--gridSizes', type=int, nargs='+', help="number of grid points in each dimension")
     args = parser.parse_args()

     if sys.version_info >= (3, 11):
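Note: with the renamed flag, the script is launched the way testRectilinear_MPI assembles its command, e.g. (values illustrative; $ALGO stands in for a block decomposition algorithm accepted by BlockDecomposition):

    mpirun -np 4 python pySDC/tests/test_helpers/test_fieldsIO.py \
        --fileName testRectilinear2D_MPI.pysdc \
        --dtypeIdx 0 --algo $ALGO --nSteps 3 --nVar 2 --gridSizes 61 16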
