diff --git a/pySDC/implementations/hooks/log_solution.py b/pySDC/implementations/hooks/log_solution.py
index e7e4501017..a176274360 100644
--- a/pySDC/implementations/hooks/log_solution.py
+++ b/pySDC/implementations/hooks/log_solution.py
@@ -208,6 +208,7 @@ class LogToFile(Hooks):
     filename = 'myRun.pySDC'
     time_increment = 0
     allow_overwriting = False
+    counter = 0  # number of stored time points in the file
 
     def __init__(self):
         super().__init__()
@@ -224,8 +225,9 @@ def pre_run(self, step, level_number):
         if os.path.isfile(self.filename) and L.time > 0:
             L.prob.setUpFieldsIO()
             self.outfile = FieldsIO.fromFile(self.filename)
+            self.counter = len(self.outfile.times)
             self.logger.info(
-                f'Set up file {self.filename!r} for writing output. This file already contains solutions up to t={self.outfile.times[-1]:.4f}.'
+                f'Set up file {self.filename!r} for writing output. This file already contains {self.counter} solutions up to t={self.outfile.times[-1]:.4f}.'
             )
         else:
             self.outfile = L.prob.getOutputFile(self.filename)
@@ -236,6 +238,9 @@ def pre_run(self, step, level_number):
                 self.outfile.addField(time=L.time, field=L.prob.processSolutionForOutput(L.u[0]))
                 self.logger.info(f'Written initial conditions at t={L.time:4f} to file')
 
+        type(self).counter = len(self.outfile.times)
+        self.logger.info(f'Will write to disk every {self.time_increment:.4e} time units')
+
     def post_step(self, step, level_number):
         if level_number > 0:
             return None
@@ -252,6 +257,20 @@ def post_step(self, step, level_number):
                 self.outfile.addField(time=L.time + L.dt, field=L.prob.processSolutionForOutput(L.uend))
                 self.logger.info(f'Written solution at t={L.time+L.dt:.4f} to file')
             self.t_next_log = max([L.time + L.dt, self.t_next_log]) + self.time_increment
+            type(self).counter = len(self.outfile.times)
+
+    def post_run(self, step, level_number):
+        if level_number > 0:
+            return None
+
+        L = step.levels[level_number]
+
+        value_exists = True in [abs(me - (L.time + L.dt)) < np.finfo(float).eps * 1000 for me in self.outfile.times]
+        if not value_exists:
+            self.outfile.addField(time=L.time + L.dt, field=L.prob.processSolutionForOutput(L.uend))
+            self.logger.info(f'Written solution at t={L.time+L.dt:.4f} to file')
+            self.t_next_log = max([L.time + L.dt, self.t_next_log]) + self.time_increment
+            type(self).counter = len(self.outfile.times)
 
     @classmethod
     def load(cls, index):
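
The hook above is configured entirely through class-level attributes. A hypothetical usage sketch (not part of the patch) is given below; only the attribute names and the `load` classmethod come from the hook itself, the concrete file name and values are made up.

    # Hypothetical sketch: configuring the extended LogToFile hook before a run.
    from pySDC.implementations.hooks.log_solution import LogToFile

    LogToFile.filename = './data/myRun.pySDC'  # FieldsIO output file
    LogToFile.time_increment = 1e-1            # write at most every 0.1 time units
    LogToFile.allow_overwriting = True         # permit overwriting previously stored data

    # After a run, LogToFile.counter holds the number of stored time points and
    # LogToFile.load(index) retrieves the snapshot stored at that index, e.g.:
    # last_snapshot = LogToFile.load(LogToFile.counter - 1)
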
diff --git a/pySDC/implementations/problem_classes/RayleighBenard.py b/pySDC/implementations/problem_classes/RayleighBenard.py
index 6c60ed5e4b..9bad277536 100644
--- a/pySDC/implementations/problem_classes/RayleighBenard.py
+++ b/pySDC/implementations/problem_classes/RayleighBenard.py
@@ -58,7 +58,9 @@ def __init__(
         BCs=None,
         dealiasing=3 / 2,
         comm=None,
-        Lx=8,
+        Lx=4,
+        Lz=1,
+        z0=0,
         **kwargs,
     ):
         """
@@ -73,11 +75,13 @@ def __init__(
             dealiasing (float): Dealiasing for evaluating the non-linear part in real space
             comm (mpi4py.Intracomm): Space communicator
             Lx (float): Horizontal length of the domain
+            Lz (float): Vertical length of the domain
+            z0 (float): Position of lower boundary
         """
         BCs = {} if BCs is None else BCs
         BCs = {
             'T_top': 0,
-            'T_bottom': 2,
+            'T_bottom': 1,
             'v_top': 0,
             'v_bottom': 0,
             'u_top': 0,
@@ -101,11 +105,16 @@ def __init__(
             'dealiasing',
             'comm',
             'Lx',
+            'Lz',
+            'z0',
             localVars=locals(),
             readOnly=True,
         )
 
-        bases = [{'base': 'fft', 'N': nx, 'x0': 0, 'x1': self.Lx}, {'base': 'ultraspherical', 'N': nz}]
+        bases = [
+            {'base': 'fft', 'N': nx, 'x0': 0, 'x1': self.Lx},
+            {'base': 'ultraspherical', 'N': nz, 'x0': self.z0, 'x1': self.Lz},
+        ]
 
         components = ['u', 'v', 'T', 'p']
         super().__init__(bases, components, comm=comm, **kwargs)
@@ -247,8 +256,8 @@ def u_exact(self, t=0, noise_level=1e-3, seed=99):
 
         # linear temperature gradient
         for comp in ['T', 'v', 'u']:
-            a = (self.BCs[f'{comp}_top'] - self.BCs[f'{comp}_bottom']) / 2
-            b = (self.BCs[f'{comp}_top'] + self.BCs[f'{comp}_bottom']) / 2
+            a = (self.BCs[f'{comp}_top'] - self.BCs[f'{comp}_bottom']) / self.Lz
+            b = self.BCs[f'{comp}_bottom'] - a * self.z0
             me[self.index(comp)] = a * self.Z + b
 
         # perturb slightly
@@ -257,7 +266,7 @@ def u_exact(self, t=0, noise_level=1e-3, seed=99):
         noise = self.spectral.u_init
         noise[iT] = rng.random(size=me[iT].shape)
 
-        me[iT] += noise[iT].real * noise_level * (self.Z - 1) * (self.Z + 1)
+        me[iT] += noise[iT].real * noise_level * (self.Z - self.z0) * (self.Z - self.z0 + self.Lz)
 
         if self.spectral_space:
             me_hat = self.spectral.u_init_forward
@@ -370,14 +379,41 @@ def compute_vorticity(self, u):
             u_hat = u.copy()
         else:
             u_hat = self.transform(u)
+
         Dz = self.Dz
         Dx = self.Dx
         iu, iv = self.index(['u', 'v'])
 
         vorticity_hat = self.spectral.u_init_forward
-        vorticity_hat[0] = (Dx * u_hat[iv].flatten() + Dz @ u_hat[iu].flatten()).reshape(u[iu].shape)
+        vorticity_hat[0] = (Dx * u_hat[iv].flatten() + Dz @ u_hat[iu].flatten()).reshape(u_hat[iu].shape)
         return self.itransform(vorticity_hat)[0].real
 
+    def getOutputFile(self, fileName):
+        from pySDC.helpers.fieldsIO import Rectilinear
+
+        self.setUpFieldsIO()
+
+        coords = [me.get_1dgrid() for me in self.spectral.axes]
+        assert np.allclose([len(me) for me in coords], self.spectral.global_shape[1:])
+
+        fOut = Rectilinear(np.float64, fileName=fileName)
+        fOut.setHeader(nVar=len(self.components) + 1, coords=coords)
+        fOut.initialize()
+        return fOut
+
+    def processSolutionForOutput(self, u):
+        vorticity = self.compute_vorticity(u)
+
+        if self.spectral_space:
+            u_real = self.itransform(u).real
+        else:
+            u_real = u.real
+
+        me = np.empty(shape=(u_real.shape[0] + 1, *vorticity.shape))
+        me[:-1] = u_real
+        me[-1] = vorticity
+        return me
+
     def compute_Nusselt_numbers(self, u):
         """
         Compute the various versions of the Nusselt number. This reflects the type of heat transport.
""" iv, iT = self.index(['v', 'T']) - - DzT_hat = self.spectral.u_init_forward + zAxis = self.spectral.axes[-1] if self.spectral_space: u_hat = u.copy() else: u_hat = self.transform(u) - DzT_hat[iT] = (self.Dz @ u_hat[iT].flatten()).reshape(DzT_hat[iT].shape) + DzT_hat = (self.Dz @ u_hat[iT].flatten()).reshape(u_hat[iT].shape) # compute vT with dealiasing padding = (self.dealiasing, self.dealiasing) u_pad = self.itransform(u_hat, padding=padding).real _me = self.xp.zeros_like(u_pad) _me[0] = u_pad[iv] * u_pad[iT] - vT_hat = self.transform(_me, padding=padding) + vT_hat = self.transform(_me, padding=padding)[0] - nusselt_hat = (vT_hat[0] - DzT_hat[iT]) / self.nx - nusselt_no_v_hat = (-DzT_hat[iT]) / self.nx + if not hasattr(self, '_zInt'): + self._zInt = zAxis.get_integration_matrix() - integral_z = self.xp.sum(nusselt_hat * self.spectral.axes[1].get_BC(kind='integral'), axis=-1).real - integral_V = ( - integral_z[0] * self.axes[0].L - ) # only the first Fourier mode has non-zero integral with periodic BCs - Nusselt_V = self.comm.bcast(integral_V / self.spectral.V, root=0) + nusselt_hat = (vT_hat / self.kappa - DzT_hat) * self.axes[-1].L - Nusselt_t = self.comm.bcast( - self.xp.sum(nusselt_hat * self.spectral.axes[1].get_BC(kind='Dirichlet', x=1), axis=-1).real[0], root=0 - ) - Nusselt_b = self.comm.bcast( - self.xp.sum(nusselt_hat * self.spectral.axes[1].get_BC(kind='Dirichlet', x=-1), axis=-1).real[0], root=0 - ) - Nusselt_no_v_t = self.comm.bcast( - self.xp.sum(nusselt_no_v_hat * self.spectral.axes[1].get_BC(kind='Dirichlet', x=1), axis=-1).real[0], root=0 - ) - Nusselt_no_v_b = self.comm.bcast( - self.xp.sum(nusselt_no_v_hat * self.spectral.axes[1].get_BC(kind='Dirichlet', x=-1), axis=-1).real[0], - root=0, - ) + # get coefficients for evaluation on the boundary + top = zAxis.get_BC(kind='Dirichlet', x=1) + bot = zAxis.get_BC(kind='Dirichlet', x=-1) + + integral_V = 0 + if self.comm.rank == 0: + + integral_z = (self._zInt @ nusselt_hat[0]).real + integral_z[0] = zAxis.get_integration_constant(integral_z, axis=-1) + integral_V = ((top - bot) * integral_z).sum() * self.axes[0].L / self.nx + + Nusselt_V = self.comm.bcast(integral_V / self.spectral.V, root=0) + Nusselt_t = self.comm.bcast(self.xp.sum(nusselt_hat.real[0] * top, axis=-1) / self.nx, root=0) + Nusselt_b = self.comm.bcast(self.xp.sum(nusselt_hat.real[0] * bot, axis=-1) / self.nx, root=0) return { 'V': Nusselt_V, 't': Nusselt_t, 'b': Nusselt_b, - 't_no_v': Nusselt_no_v_t, - 'b_no_v': Nusselt_no_v_b, } def compute_viscous_dissipation(self, u): @@ -509,8 +539,8 @@ def compute_max_step_size(P, u): grid_spacing_x = P.X[1, 0] - P.X[0, 0] cell_wallz = P.xp.zeros(P.nz + 1) - cell_wallz[0] = 1 - cell_wallz[-1] = -1 + cell_wallz[0] = P.Lz + cell_wallz[-1] = 0 cell_wallz[1:-1] = (P.Z[0, :-1] + P.Z[0, 1:]) / 2 grid_spacing_z = cell_wallz[:-1] - cell_wallz[1:] diff --git a/pySDC/projects/GPU/configs/RBC_configs.py b/pySDC/projects/GPU/configs/RBC_configs.py index 10076257d1..284c8c7279 100644 --- a/pySDC/projects/GPU/configs/RBC_configs.py +++ b/pySDC/projects/GPU/configs/RBC_configs.py @@ -29,54 +29,6 @@ class RayleighBenardRegular(Config): sweeper_type = 'IMEX' Tend = 50 - def get_LogToFile(self, ranks=None): - import numpy as np - from pySDC.implementations.hooks.log_solution import LogToPickleFileAfterXS as LogToFile - - LogToFile.path = f'{self.base_path}/data/' - LogToFile.file_name = f'{self.get_path(ranks=ranks)}-solution' - LogToFile.time_increment = 1e-1 - - def process_solution(L): - P = L.prob - - if P.spectral_space: - uend = 
diff --git a/pySDC/projects/GPU/configs/RBC_configs.py b/pySDC/projects/GPU/configs/RBC_configs.py
index 10076257d1..284c8c7279 100644
--- a/pySDC/projects/GPU/configs/RBC_configs.py
+++ b/pySDC/projects/GPU/configs/RBC_configs.py
@@ -29,54 +29,6 @@ class RayleighBenardRegular(Config):
     sweeper_type = 'IMEX'
     Tend = 50
 
-    def get_LogToFile(self, ranks=None):
-        import numpy as np
-        from pySDC.implementations.hooks.log_solution import LogToPickleFileAfterXS as LogToFile
-
-        LogToFile.path = f'{self.base_path}/data/'
-        LogToFile.file_name = f'{self.get_path(ranks=ranks)}-solution'
-        LogToFile.time_increment = 1e-1
-
-        def process_solution(L):
-            P = L.prob
-
-            if P.spectral_space:
-                uend = P.itransform(L.uend)
-
-            data = {
-                't': L.time + L.dt,
-                'local_slice': P.local_slice,
-                'shape': P.global_shape,
-            }
-
-            if P.useGPU:
-                data['u'] = uend.get().view(np.ndarray)
-                data['vorticity'] = L.prob.compute_vorticity(L.uend).get().view(np.ndarray)
-                if L.time == 0:
-                    data['X'] = L.prob.X.get()
-                    data['Z'] = L.prob.Z.get()
-            else:
-                data['u'] = uend.view(np.ndarray)
-                data['vorticity'] = L.prob.compute_vorticity(L.uend).view(np.ndarray)
-                if L.time == 0:
-                    data['X'] = L.prob.X
-                    data['Z'] = L.prob.Z
-            return data
-
-        def logging_condition(L):
-            sweep = L.sweep
-            if hasattr(sweep, 'comm'):
-                if sweep.comm.rank == sweep.comm.size - 1:
-                    return True
-                else:
-                    return False
-            else:
-                return True
-
-        LogToFile.process_solution = process_solution
-        LogToFile.logging_condition = logging_condition
-        return LogToFile
-
     def get_controller_params(self, *args, **kwargs):
         from pySDC.implementations.hooks.log_step_size import LogStepSize
 
@@ -125,12 +77,12 @@ def get_description(self, *args, MPIsweeper=False, res=-1, **kwargs):
         return desc
 
     def get_initial_condition(self, P, *args, restart_idx=0, **kwargs):
-        if restart_idx > 0:
-            return super().get_initial_condition(P, *args, restart_idx=restart_idx, **kwargs)
-        else:
+        if restart_idx == 0:
             u0 = P.u_exact(t=0, seed=P.comm.rank, noise_level=1e-3)
             u0_with_pressure = P.solve_system(u0, 1e-9, u0)
             return u0_with_pressure, 0
+        else:
+            return super().get_initial_condition(P, *args, restart_idx=restart_idx, **kwargs)
 
     def prepare_caches(self, prob):
         """
@@ -139,6 +91,7 @@ def prepare_caches(self, prob):
         prob.eval_f(prob.u_init)
 
     def plot(self, P, idx, n_procs_list, quantitiy='T', quantitiy2='vorticity'):
+        from pySDC.helpers.fieldsIO import FieldsIO
         import numpy as np
 
         cmaps = {'vorticity': 'bwr', 'p': 'bwr'}
@@ -147,48 +100,30 @@ def plot(self, P, idx, n_procs_list, quantitiy='T', quantitiy2='vorticity'):
         cax = P.cax
         axs = fig.get_axes()
 
-        buffer = {}
-        vmin = {quantitiy: np.inf, quantitiy2: np.inf}
-        vmax = {quantitiy: -np.inf, quantitiy2: -np.inf}
-
-        X = {}
-        Z = {}
-
-        for rank in range(n_procs_list[2]):
-            ranks = self.ranks[:-1] + [rank]
-            LogToFile = self.get_LogToFile(ranks=ranks)
-
-            buffer[f'u-{rank}'] = LogToFile.load(idx)
-
-            vmin[quantitiy2] = min([vmin[quantitiy2], buffer[f'u-{rank}'][quantitiy2].real.min()])
-            vmax[quantitiy2] = max([vmax[quantitiy2], buffer[f'u-{rank}'][quantitiy2].real.max()])
-            vmin[quantitiy] = min([vmin[quantitiy], buffer[f'u-{rank}']['u'][P.index(quantitiy)].real.min()])
-            vmax[quantitiy] = max([vmax[quantitiy], buffer[f'u-{rank}']['u'][P.index(quantitiy)].real.max()])
-
-            first_frame = LogToFile.load(0)
-            X[rank] = first_frame['X']
-            Z[rank] = first_frame['Z']
-
-        for rank in range(n_procs_list[2]):
-            im2 = axs[1].pcolormesh(
-                X[rank],
-                Z[rank],
-                buffer[f'u-{rank}'][quantitiy2].real,
-                vmin=-vmax[quantitiy2] if cmaps.get(quantitiy2, None) in ['bwr'] else vmin[quantitiy2],
-                vmax=vmax[quantitiy2],
-                cmap=cmaps.get(quantitiy2, None),
-            )
-            im = axs[0].pcolormesh(
-                X[rank],
-                Z[rank],
-                buffer[f'u-{rank}']['u'][P.index(quantitiy)].real,
-                vmin=vmin[quantitiy],
-                vmax=-vmin[quantitiy] if cmaps.get(quantitiy, None) in ['bwr', 'seismic'] else vmax[quantitiy],
-                cmap=cmaps.get(quantitiy, 'plasma'),
-            )
+        outfile = FieldsIO.fromFile(self.get_file_name())
+
+        x = outfile.header['coords'][0]
+        z = outfile.header['coords'][1]
+        X, Z = np.meshgrid(x, z, indexing='ij')
+
+        t, data = outfile.readField(idx)
+        im = axs[0].pcolormesh(
+            X,
+            Z,
+            data[P.index(quantitiy)].real,
+            cmap=cmaps.get(quantitiy, 'plasma'),
+        )
+
+        im2 = axs[1].pcolormesh(
+            X,
+            Z,
+            data[-1].real,
+            cmap=cmaps.get(quantitiy2, None),
+        )
+
         fig.colorbar(im2, cax[1])
         fig.colorbar(im, cax[0])
-        axs[0].set_title(f't={buffer[f"u-{rank}"]["t"]:.2f}')
+        axs[0].set_title(f't={t:.2f}')
         axs[1].set_xlabel('x')
         axs[1].set_ylabel('z')
         axs[0].set_aspect(1.0)
diff --git a/pySDC/projects/GPU/configs/base_config.py b/pySDC/projects/GPU/configs/base_config.py
index 51fc3ecf69..565af7221a 100644
--- a/pySDC/projects/GPU/configs/base_config.py
+++ b/pySDC/projects/GPU/configs/base_config.py
@@ -59,6 +59,7 @@ class Config(object):
     sweeper_type = None
     Tend = None
     base_path = './'
+    logging_time_increment = 0.5
 
     def __init__(self, args, comm_world=None):
         from mpi4py import MPI
@@ -72,6 +73,21 @@ def __init__(self, args, comm_world=None):
             self.comms = [MPI.COMM_SELF, MPI.COMM_SELF, MPI.COMM_SELF]
         self.ranks = [me.rank for me in self.comms]
 
+    def get_file_name(self):
+        res = self.args['res']
+        return f'{self.base_path}/data/{type(self).__name__}-res{res}.pySDC'
+
+    def get_LogToFile(self, *args, **kwargs):
+        if self.comms[1].rank > 0:
+            return None
+        from pySDC.implementations.hooks.log_solution import LogToFile
+
+        LogToFile.filename = self.get_file_name()
+        LogToFile.time_increment = self.logging_time_increment
+        LogToFile.allow_overwriting = True
+
+        return LogToFile
+
     def get_description(self, *args, MPIsweeper=False, useGPU=False, **kwargs):
         description = {}
         description['problem_class'] = None
@@ -83,7 +99,8 @@ def get_description(self, *args, MPIsweeper=False, useGPU=False, **kwargs):
         description['convergence_controllers'] = {}
 
         if self.get_LogToFile():
-            description['convergence_controllers'][LogStats] = {}
+            path = self.get_file_name()[:-6]
+            description['convergence_controllers'][LogStats] = {'path': path}
 
         if MPIsweeper:
             description['sweeper_params']['comm'] = self.comms[1]
@@ -137,7 +154,27 @@ def plot(self, P, idx, num_procs_list):
         raise NotImplementedError
 
     def get_initial_condition(self, P, *args, restart_idx=0, **kwargs):
-        if restart_idx > 0:
+        if restart_idx == 0:
+            return P.u_exact(t=0), 0
+        else:
+
+            from pySDC.helpers.fieldsIO import FieldsIO
+
+            P.setUpFieldsIO()
+            outfile = FieldsIO.fromFile(self.get_file_name())
+
+            t0, solution = outfile.readField(restart_idx)
+            solution = solution[: P.spectral.ncomponents, ...]
+
+            u0 = P.u_init
+
+            if P.spectral_space:
+                u0[...] = P.transform(solution)
+            else:
+                u0[...] = solution
+
+            return u0, t0
+
             LogToFile = self.get_LogToFile()
             file = LogToFile.load(restart_idx)
             LogToFile.counter = restart_idx
@@ -150,26 +187,19 @@ def get_initial_condition(self, P, *args, restart_idx=0, **kwargs):
             else:
                 u0[...] = file['u']
             return u0, file['t']
-        else:
-            return P.u_exact(t=0), 0
-
-    def get_LogToFile(self):
-        return None
 
 
 class LogStats(ConvergenceController):
-    @staticmethod
-    def get_stats_path(hook, counter_offset=-1, index=None):
-        index = hook.counter + counter_offset if index is None else index
-        return f'{hook.path}/{hook.file_name}_{hook.format_index(index)}-stats.pickle'
+    def get_stats_path(self, index=0):
+        return f'{self.params.path}_{index:06d}-stats.pickle'
 
     def merge_all_stats(self, controller):
         hook = self.params.hook
 
         stats = {}
-        for i in range(hook.counter):
-            with open(self.get_stats_path(hook, index=i), 'rb') as file:
+        for i in range(hook.counter - 1):
+            with open(self.get_stats_path(index=i), 'rb') as file:
                 _stats = pickle.load(file)
             stats = {**stats, **_stats}
 
@@ -184,38 +214,37 @@ def reset_stats(self, controller):
 
     def setup(self, controller, params, *args, **kwargs):
         params['control_order'] = 999
         if 'hook' not in params.keys():
-            from pySDC.implementations.hooks.log_solution import LogToFileAfterXs
+            from pySDC.implementations.hooks.log_solution import LogToFile
 
-            params['hook'] = LogToFileAfterXs
+            params['hook'] = LogToFile
         self.counter = params['hook'].counter
-
         return super().setup(controller, params, *args, **kwargs)
 
     def post_step_processing(self, controller, S, **kwargs):
         hook = self.params.hook
-        for _hook in controller.hooks:
-            _hook.post_step(S, 0)
-
         P = S.levels[0].prob
-        skip_logging = False
-        if hasattr(P, 'comm'):
-            if P.comm.rank > 0:
-                skip_logging = True
-                return None
 
         while self.counter < hook.counter:
-            path = self.get_stats_path(hook, index=self.counter)
+            path = self.get_stats_path(index=hook.counter - 2)
             stats = controller.return_stats()
-            if hook.logging_condition(S.levels[0]) and not skip_logging:
+            store = True
+            if hasattr(S.levels[0].sweep, 'comm') and S.levels[0].sweep.comm.rank > 0:
+                store = False
+            elif P.comm.rank > 0:
+                store = False
+            if store:
                 with open(path, 'wb') as file:
                     pickle.dump(stats, file)
                 self.log(f'Stored stats in {path!r}', S)
+            # print(stats)
            self.reset_stats(controller)
            self.counter = hook.counter
 
-    def post_run_processing(self, controller, *args, **kwargs):
+    def post_run_processing(self, controller, S, **kwargs):
+        self.post_step_processing(controller, S, **kwargs)
+
         stats = self.merge_all_stats(controller)
 
         def return_stats():
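
A minimal sketch (not part of the patch) of the restart path that Config.get_initial_condition above implements; `config` stands for a Config instance and `P` for its problem, both placeholders.

    # Minimal restart sketch, mirroring Config.get_initial_condition above.
    from pySDC.helpers.fieldsIO import FieldsIO

    def load_restart(config, P, restart_idx):
        P.setUpFieldsIO()                                    # prepare FieldsIO for the spatial decomposition
        outfile = FieldsIO.fromFile(config.get_file_name())

        t0, solution = outfile.readField(restart_idx)        # stored time and field at that index
        solution = solution[: P.spectral.ncomponents, ...]   # drop the appended vorticity slot

        u0 = P.u_init
        u0[...] = P.transform(solution) if P.spectral_space else solution
        return u0, t0
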
diff --git a/pySDC/projects/GPU/run_experiment.py b/pySDC/projects/GPU/run_experiment.py
index c0726861ae..b992306d74 100644
--- a/pySDC/projects/GPU/run_experiment.py
+++ b/pySDC/projects/GPU/run_experiment.py
@@ -32,12 +32,15 @@ def str_to_procs(me):
 
 def run_experiment(args, config, **kwargs):
     import pickle
+    import os
 
     # from pySDC.implementations.controller_classes.controller_MPI import controller_MPI
     from pySDC.implementations.controller_classes.controller_nonMPI import controller_nonMPI
     from pySDC.helpers.stats_helper import filter_stats
 
     type(config).base_path = args['o']
+    os.makedirs(f'{args["o"]}/data', exist_ok=True)
+
     description = config.get_description(
         useGPU=args['useGPU'], MPIsweeper=args['procs'][1] > 1, res=args['res'], **kwargs
     )
@@ -155,6 +158,7 @@ def make_video(args, config):  # pragma: no cover
 
     cmd = f'ffmpeg -i {path} -pix_fmt yuv420p -r 9 -s 2048:1536 -y {path_target}'.split()
     subprocess.run(cmd)
+    print(f'Made video {path_target!r}')
 
 
 if __name__ == '__main__':
diff --git a/pySDC/projects/GPU/tests/test_configs.py b/pySDC/projects/GPU/tests/test_configs.py
index ab27a2621e..e65edf1ba1 100644
--- a/pySDC/projects/GPU/tests/test_configs.py
+++ b/pySDC/projects/GPU/tests/test_configs.py
@@ -35,76 +35,69 @@ def create_directories():
     os.makedirs(path, exist_ok=True)
 
 
-@pytest.mark.order(1)
-def test_run_experiment(restart_idx=0):
-    from pySDC.projects.GPU.configs.base_config import Config
-    from pySDC.projects.GPU.run_experiment import run_experiment, parse_args
+def test_run(tmpdir):
+    from pySDC.projects.GPU.configs.base_config import get_config
+    from pySDC.projects.GPU.run_experiment import run_experiment
     from pySDC.helpers.stats_helper import get_sorted
+    from pySDC.helpers.fieldsIO import FieldsIO
     import pickle
     import numpy as np
 
-    create_directories()
-
-    class VdPConfig(Config):
-        sweeper_type = 'generic_implicit'
-        Tend = 1
-
-        def get_description(self, *args, **kwargs):
-            from pySDC.implementations.problem_classes.Van_der_Pol_implicit import vanderpol
-
-            desc = super().get_description(*args, **kwargs)
-            desc['problem_class'] = vanderpol
-            desc['problem_params'].pop('useGPU')
-            desc['problem_params'].pop('comm')
-            desc['sweeper_params']['num_nodes'] = 2
-            desc['sweeper_params']['quad_type'] = 'RADAU-RIGHT'
-            desc['sweeper_params']['QI'] = 'LU'
-            desc['level_params']['dt'] = 0.1
-            desc['step_params']['maxiter'] = 3
-            return desc
-
-        def get_LogToFile(self, ranks=None):
-            from pySDC.implementations.hooks.log_solution import LogToPickleFileAfterXS as LogToFile
-
-            LogToFile.path = './data/'
-            LogToFile.file_name = f'{self.get_path(ranks=ranks)}-solution'
-            LogToFile.time_increment = 2e-1
-
-            def logging_condition(L):
-                sweep = L.sweep
-                if hasattr(sweep, 'comm'):
-                    if sweep.comm.rank == sweep.comm.size - 1:
-                        return True
-                    else:
-                        return False
-                else:
-                    return True
-
-            LogToFile.logging_condition = logging_condition
-            return LogToFile
-
     args = {
+        'config': 'RBC',
         'procs': [1, 1, 1],
+        'res': 16,
+        'mode': 'run',
         'useGPU': False,
-        'res': -1,
+        'o': tmpdir,
         'logger_level': 15,
-        'restart_idx': restart_idx,
-        'mode': 'run',
+        'restart_idx': 0,
     }
 
-    config = VdPConfig(args)
+    config = get_config(args)
+    type(config).base_path = args['o']
+
+    def get_LogToFile(self, *args, **kwargs):
+        if self.comms[1].rank > 0:
+            return None
+        from pySDC.implementations.hooks.log_solution import LogToFile
+
+        LogToFile.filename = self.get_file_name()
+        LogToFile.time_increment = 0
+        LogToFile.allow_overwriting = True
+
+        return LogToFile
+
+    type(config).get_LogToFile = get_LogToFile
+    stats_path = f'{config.base_path}/data/{config.get_path()}-stats-whole-run.pickle'
+    file_path = config.get_file_name()
+
+    # first run for a short time
+    dt = config.get_description()['level_params']['dt']
+    config.Tend = 2 * dt
     run_experiment(args, config)
 
-    with open(f'data/{config.get_path()}-stats-whole-run.pickle', 'rb') as file:
+    # check data
+    data = FieldsIO.fromFile(file_path)
+    with open(stats_path, 'rb') as file:
         stats = pickle.load(file)
-    k_Newton = get_sorted(stats, type='work_newton')
-    assert len(k_Newton) == 10
-    assert sum([me[1] for me in k_Newton]) == 91
+    dts = get_sorted(stats, type='dt')
+    assert len(dts) == len(data.times) - 1
+    assert np.allclose(data.times, 0.1 * np.arange(3)), 'Did not record solutions at expected times'
 
+    # restart run
+    args['restart_idx'] = -1
+    config.Tend = 4 * dt
+    run_experiment(args, config)
+
+    # check data
+    data = FieldsIO.fromFile(file_path)
+    with open(stats_path, 'rb') as file:
+        stats = pickle.load(file)
+    dts = get_sorted(stats, type='dt')
 
-@pytest.mark.order(2)
-def test_restart():
-    test_run_experiment(3)
+    assert len(dts) == len(data.times) - 1
+    assert np.allclose(data.times, 0.1 * np.arange(5)), 'Did not record solutions at expected times after restart'
 
 
 if __name__ == '__main__':
@@ -117,9 +110,5 @@ def test_restart():
 
     if args.test == 'get_comms':
         test_get_comms(False)
-    elif args.test == 'run_experiment':
-        test_run_experiment()
-    elif args.test == 'restart':
-        test_restart()
     else:
         raise NotImplementedError
diff --git a/pySDC/projects/Resilience/RBC.py b/pySDC/projects/Resilience/RBC.py
index 94b898cdff..5c7f5bfa63 100644
--- a/pySDC/projects/Resilience/RBC.py
+++ b/pySDC/projects/Resilience/RBC.py
@@ -14,7 +14,7 @@
 import numpy as np
 
 
-PROBLEM_PARAMS = {'Rayleigh': 3.2e5, 'nx': 256, 'nz': 128, 'max_cached_factorizations': 30}
+PROBLEM_PARAMS = {'Rayleigh': 3.2e5, 'nx': 256, 'nz': 128, 'max_cached_factorizations': 30, 'Lx': 8, 'Lz': 4, 'z0': 0}
 
 
 def u_exact(self, t, u_init=None, t_init=None, recompute=False, _t0=None):
diff --git a/pySDC/tests/test_hooks/test_log_to_file.py b/pySDC/tests/test_hooks/test_log_to_file.py
index f5eb06fe41..2a1ca592f9 100644
--- a/pySDC/tests/test_hooks/test_log_to_file.py
+++ b/pySDC/tests/test_hooks/test_log_to_file.py
@@ -126,7 +126,10 @@ def test_logging(tmpdir, use_pickle, ODE=True):
 
     for us, uf in zip(u, u_file):
         assert us[0] == uf[0], 'time does not match'
-        assert np.allclose(us[1], uf[1]), 'solution does not match'
+        if ODE:
+            assert np.allclose(us[1], uf[1]), 'solution does not match'
+        else:
+            assert np.allclose(us[1], uf[1][:4]), 'solution does not match'
 
 
 @pytest.mark.base
diff --git a/pySDC/tests/test_problems/test_RayleighBenard.py b/pySDC/tests/test_problems/test_RayleighBenard.py
index c9cbe693c8..d39cf18bd1 100644
--- a/pySDC/tests/test_problems/test_RayleighBenard.py
+++ b/pySDC/tests/test_problems/test_RayleighBenard.py
@@ -105,23 +105,38 @@ def test_vorticity(nx, nz, direction):
 
 @pytest.mark.mpi4py
 @pytest.mark.parametrize('v', [0, 3.14])
-def test_Nusselt_numbers(v, nx=6, nz=4):
+def test_Nusselt_numbers(v, nx=1, nz=10):
     import numpy as np
     from pySDC.implementations.problem_classes.RayleighBenard import RayleighBenard
 
     BCs = {
-        'v_top': v,
-        'v_bottom': v,
+        'v_top': 0,
+        'v_bottom': 0,
     }
 
-    P = RayleighBenard(nx=nx, nz=nz, BCs=BCs, dealiasing=1.5)
+    P = RayleighBenard(nx=nx, nz=nz, BCs=BCs, dealiasing=1.0, Rayleigh=1, spectral_space=False)
+    prob = P
+    xp = prob.xp
+    iv, iT = prob.index(['v', 'T'])
+
+    u = prob.u_init
+    u[iT, ...] = prob.Z
+    Nu = prob.compute_Nusselt_numbers(u)
+    for key, expect in zip(['t', 'b', 'V'], [-1, -1, -1]):
+        assert xp.isclose(Nu[key], expect), f'Expected Nu_{key}={expect}, but got {Nu[key]}'
 
-    u = P.u_exact(noise_level=0)
+    u = prob.u_init
+    u[iT, ...] = 3 * prob.Z**2 + 1
+    Nu = prob.compute_Nusselt_numbers(u)
+    for key, expect in zip(['t', 'b', 'V'], [-6, 0, -3]):
+        assert xp.isclose(Nu[key], expect), f'Expected Nu_{key}={expect}, but got {Nu[key]}'
 
-    nusselt = P.compute_Nusselt_numbers(u)
-    expect = {'V': 1 + v, 't': 1, 'b': +1 + 2 * v, 'b_no_v': 1, 't_no_v': 1}
-    for key in nusselt.keys():
-        assert np.isclose(nusselt[key], expect[key]), key
+    u = prob.u_init
+    u[iT, ...] = 3 * prob.Z**2 + 1
+    u[iv] = v * (1 + xp.sin(prob.X / prob.axes[0].L * 2 * xp.pi))
+    Nu = prob.compute_Nusselt_numbers(u)
+    for key, expect in zip(['t', 'b', 'V'], [prob.Lz * (3 + 1) * v - 6, v, v * (1 + 1) - 3]):
+        assert xp.isclose(Nu[key], expect), f'Expected Nu_{key}={expect}, but got {Nu[key]}'
 
 
 def test_viscous_dissipation(nx=2**5 + 1, nz=2**3 + 1):
@@ -182,7 +197,7 @@ def test_Poisson_problems(nx, component):
         'T_bottom': 0,
     }
     P = RayleighBenard(
-        nx=nx, nz=6, BCs=BCs, Rayleigh=(max([abs(BCs['T_top'] - BCs['T_bottom']), np.finfo(float).eps]) * 2**3)
+        nx=nx, nz=6, BCs=BCs, Rayleigh=(max([abs(BCs['T_top'] - BCs['T_bottom']), np.finfo(float).eps]) * 2**3), Lz=2
     )
 
     rhs = P.u_init
@@ -221,7 +236,7 @@ def test_Poisson_problem_v():
         'v_top': 0,
         'v_bottom': 0,
         'T_top': 0,
-        'T_bottom': 2,
+        'T_bottom': 1,
     }
     P = RayleighBenard(nx=2, nz=2**3, BCs=BCs, Rayleigh=1.0)
     iv = P.index('v')
@@ -256,7 +271,7 @@ def test_CFL():
     from pySDC.implementations.problem_classes.RayleighBenard import RayleighBenard, CFLLimit
     import numpy as np
 
-    P = RayleighBenard(nx=5, nz=2, spectral_space=False)
+    P = RayleighBenard(nx=5, nz=2, spectral_space=False, Lz=2)
     iu, iv = P.index(['u', 'v'])
 
     u = P.u_init
@@ -315,11 +330,11 @@ def test_apply_BCs():
 
 if __name__ == '__main__':
     # test_eval_f(2**0, 2**2, 'z', True)
-    # test_Poisson_problem(1, 'T')
+    # test_Poisson_problems(1, 'T')
     # test_Poisson_problem_v()
     # test_apply_BCs()
-    test_Nusselt_numbers(3.14)
+    # test_Nusselt_numbers(1)
     # test_buoyancy_computation()
     # test_viscous_dissipation()
-    # test_CFL()
+    test_CFL()
     # test_Nyquist_mode_elimination()
diff --git a/pySDC/tests/test_problems/test_RayleighBenard3D.py b/pySDC/tests/test_problems/test_RayleighBenard3D.py
index 5cd3d7a78b..0105915f5d 100644
--- a/pySDC/tests/test_problems/test_RayleighBenard3D.py
+++ b/pySDC/tests/test_problems/test_RayleighBenard3D.py
@@ -294,7 +294,7 @@ def test_heterogeneous_implementation():
 
 @pytest.mark.mpi4py
 @pytest.mark.parametrize('w', [0, 1, 3.14])
-def test_Nusselt_number_computation(w, N=4):
+def test_Nusselt_number_computation(w, N=6):
     from pySDC.implementations.problem_classes.RayleighBenard3D import RayleighBenard3D
 
     prob = RayleighBenard3D(nx=N, ny=N, nz=N, dealiasing=1.0, spectral_space=False)