Skip to content

Commit ae47cf9

Browse files
author
Thomas Baumann
committed
Changed configurations
#!!!!!! WARNING: FLAKEHEAVEN FAILED !!!!!!: #:
1 parent 579478a commit ae47cf9

File tree

2 files changed

+31
-27
lines changed

2 files changed

+31
-27
lines changed

pySDC/projects/GPU/analysis_scripts/parallel_scaling.py

Lines changed: 30 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,8 @@
44
from pySDC.helpers.stats_helper import get_sorted
55
from pySDC.projects.GPU.configs.base_config import get_config
66
from pySDC.projects.GPU.etc.generate_jobscript import write_jobscript, PROJECT_PATH
7+
from pySDC.helpers.plot_helper import setup_mpl, figsize_by_journal
8+
setup_mpl()
79

810

911
class ScalingConfig(object):
@@ -30,19 +32,18 @@ def get_resolution_and_tasks(self, strong, i):
3032
if strong:
3133
return self.base_resolution, [1, self._tasks_time, 2**i]
3234
else:
33-
return self.base_resolution_weak * (2**i), [1, self._tasks_time, (2 * self.ndim) ** i]
35+
return self.base_resolution_weak * int(self._tasks_time**(1./self.ndim)) * (2**i), [1, self._tasks_time, (2 * self.ndim) ** i]
3436

3537
def run_scaling_test(self, strong=True):
3638
max_steps = self.max_steps_space if strong else self.max_steps_space_weak
3739
for i in range(max_steps):
3840
res, procs = self.get_resolution_and_tasks(strong, i)
3941

40-
sbatch_options = [f'-n {np.prod(procs)}', f'-p {self.partition}'] + self.sbatch_options
42+
sbatch_options = [f'-n {np.prod(procs)}', f'-p {self.partition}', f'--tasks-per-node={self.tasks_per_node}'] + self.sbatch_options
43+
srun_options = [f'--tasks-per-node={self.tasks_per_node}']
4144
if self.useGPU:
42-
srun_options = ['--cpus-per-task=4', '--gpus-per-task=1'] + self.sbatch_options
45+
srun_options += ['--cpus-per-task=4', '--gpus-per-task=1']
4346
sbatch_options += ['--cpus-per-task=4', '--gpus-per-task=1']
44-
else:
45-
srun_options = []
4647

4748
procs = (''.join(f'{me}/' for me in procs))[:-1]
4849
command = f'run_experiment.py --mode=run --res={res} --config={self.config} --procs={procs}'
@@ -69,20 +70,20 @@ def plot_scaling_test(self, strong, ax, plot_ideal=False, **plotting_params): #
6970
stats = pickle.load(file)
7071

7172
timing_step = get_sorted(stats, type='timing_step')
72-
7373
timings[np.prod(procs) / self.tasks_per_node] = np.mean([me[1] for me in timing_step])
7474
except FileNotFoundError:
7575
pass
7676

77-
ax.loglog(timings.keys(), timings.values(), **plotting_params)
7877
if plot_ideal:
79-
ax.loglog(
80-
timings.keys(),
81-
list(timings.values())[0] * list(timings.keys())[0] / np.array(list(timings.keys())),
82-
ls='--',
83-
color='grey',
84-
label='ideal',
85-
)
78+
if strong:
79+
ax.loglog(
80+
timings.keys(),
81+
list(timings.values())[0] * list(timings.keys())[0] / np.array(list(timings.keys())),
82+
ls='--',
83+
color='grey',
84+
label='ideal',
85+
)
86+
ax.loglog(timings.keys(), timings.values(), **plotting_params)
8687
ax.set_xlabel(r'$N_\mathrm{nodes}$')
8788
ax.set_ylabel(r'$t_\mathrm{step}$')
8889

@@ -91,7 +92,6 @@ class CPUConfig(ScalingConfig):
9192
cluster = 'jusuf'
9293
partition = 'batch'
9394
tasks_per_node = 16
94-
sbatch_options = ['--tasks-per-node=16']
9595

9696

9797
class GPUConfig(ScalingConfig):
@@ -102,36 +102,39 @@ class GPUConfig(ScalingConfig):
102102

103103

104104
class GrayScottSpaceScalingCPU(CPUConfig, ScalingConfig):
105-
base_resolution = 4096
106-
base_resolution_weak = 256
105+
base_resolution = 8192
106+
base_resolution_weak = 512
107107
config = 'GS_scaling'
108-
max_steps_space = 10
109-
max_steps_space_weak = 6
110-
tasks_time = 3
108+
max_steps_space = 11
109+
max_steps_space_weak = 11
110+
tasks_time = 4
111+
sbatch_options = ['--time=3:30:00']
111112

112113

113114
class GrayScottSpaceScalingGPU(GPUConfig, ScalingConfig):
114-
base_resolution_weak = 256 * 2
115-
base_resolution = 4096
115+
base_resolution_weak = 1024
116+
base_resolution = 8192
116117
config = 'GS_scaling'
117-
max_steps_space = 6
118+
max_steps_space = 7
118119
max_steps_space_weak = 4
119-
tasks_time = 3
120+
tasks_time = 4
120121

121122

122123
def plot_scalings(strong, problem, kwargs): # pragma: no cover
123124
if problem == 'GS':
124-
fig, ax = plt.subplots()
125+
fig, ax = plt.subplots(figsize=figsize_by_journal('JSC_beamer', 1, 0.45))
125126

126127
plottings_params = [
127-
{'plot_ideal': strong, 'marker': 'x', 'label': 'CPU'},
128+
{'plot_ideal': True, 'marker': 'x', 'label': 'CPU space parallel'},
128129
{'marker': '>', 'label': 'CPU space time parallel'},
129-
{'marker': '^', 'label': 'GPU'},
130+
{'marker': '^', 'label': 'GPU space parallel'},
131+
{'marker': '<', 'label': 'GPU space time parallel'},
130132
]
131133
configs = [
132134
GrayScottSpaceScalingCPU(space_time_parallel=False),
133135
GrayScottSpaceScalingCPU(space_time_parallel=True),
134136
GrayScottSpaceScalingGPU(space_time_parallel=False),
137+
GrayScottSpaceScalingGPU(space_time_parallel=True),
135138
]
136139

137140
for config, params in zip(configs, plottings_params):

pySDC/projects/GPU/configs/GS_configs.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -201,6 +201,7 @@ def get_description(self, *args, **kwargs):
201201
desc['problem_params']['L'] = 2
202202
desc['problem_params']['num_blobs'] = 4
203203
desc['sweeper_params']['skip_residual_computation'] = ('IT_CHECK', 'IT_DOWN', 'IT_UP', 'IT_FINE', 'IT_COARSE')
204+
desc['sweeper_params']['num_nodes'] = 4
204205
self.Tend = 50 * desc['level_params']['dt']
205206
return desc
206207

0 commit comments

Comments (0)