88 changes: 58 additions & 30 deletions pySDC/projects/GPU/analysis_scripts/parallel_scaling.py
@@ -81,13 +81,17 @@ def run_scaling_test(self, **kwargs):
**kwargs,
)

def plot_scaling_test(self, ax, quantity='time', **plotting_params): # pragma: no cover
def plot_scaling_test(self, ax, quantity='time', space_time=None, **plotting_params): # pragma: no cover
from matplotlib.colors import TABLEAU_COLORS

cmap = TABLEAU_COLORS
colors = list(cmap.values())

for experiment in self.experiments:
if space_time is not None:
if not experiment.PinT == space_time:
continue

tasks_time = self.tasks_time if experiment.PinT else 1
timings = {}

@@ -141,20 +145,30 @@ def plot_scaling_test(self, ax, quantity='time', **plotting_params): # pragma: no cover
elif quantity == 'throughput_per_task':
timings[np.prod(procs)] = experiment.res**self.ndim / t_mean
elif quantity == 'efficiency':
if type(config).__name__ == 'GrayScottScaling3D':
norm = 13216322.909
else:
norm = 1
timings[np.prod(procs) / self.tasks_per_node] = (
experiment.res**self.ndim / t_mean / np.prod(procs)
experiment.res**self.ndim / t_mean / np.prod(procs) / norm
)
elif quantity == 'time':
timings[np.prod(procs) / self.tasks_per_node] = t_mean
elif quantity == 'time_per_task':
timings[np.prod(procs)] = t_mean
elif quantity == 'min_time_per_task':
timings[np.prod(procs)] = t_min
elif quantity == 'min_time':
timings[np.prod(procs) / self.tasks_per_node] = t_min
else:
raise NotImplementedError
except (FileNotFoundError, ValueError):
pass

if quantity == 'efficiency' and type(config).__name__ == 'RayleighBenard_scaling':
norm = max(timings.values())
timings = {key: value / norm for key, value in timings.items()}

ax.loglog(
timings.keys(),
timings.values(),
@@ -171,7 +185,8 @@ def plot_scaling_test(self, ax, quantity='time', **plotting_params): # pragma: no cover
'time': r'$t_\mathrm{step}$ / s',
'time_per_task': r'$t_\mathrm{step}$ / s',
'min_time_per_task': r'minimal $t_\mathrm{step}$ / s',
'efficiency': 'efficiency / DoF/s/task',
'min_time': r'minimal $t_\mathrm{step}$ / s',
'efficiency': r'parallel efficiency / \%',
}
ax.set_ylabel(labels[quantity])

Expand Down Expand Up @@ -331,17 +346,28 @@ class RayleighBenardDedalusComparisonGPU(GPUConfig, ScalingConfig):
]


def plot_scalings(problem, **kwargs): # pragma: no cover
def plot_scalings(problem, XPU=None, space_time=None, **kwargs): # pragma: no cover
if problem == 'GS3D':
configs = [
GrayScottSpaceScalingCPU3D(),
GrayScottSpaceScalingGPU3D(),
]
if XPU == 'CPU':
configs = [GrayScottSpaceScalingCPU3D()]
elif XPU == 'GPU':
configs = [GrayScottSpaceScalingGPU3D()]
else:
configs = [GrayScottSpaceScalingCPU3D(), GrayScottSpaceScalingGPU3D()]
elif problem == 'RBC':
configs = [
RayleighBenardSpaceScalingGPU(),
RayleighBenardSpaceScalingCPU(),
]
if XPU == 'CPU':
configs = [
RayleighBenardSpaceScalingCPU(),
]
elif XPU == 'GPU':
configs = [
RayleighBenardSpaceScalingGPU(),
]
else:
configs = [
RayleighBenardSpaceScalingGPU(),
RayleighBenardSpaceScalingCPU(),
]
elif problem == 'RBC_dedalus':
configs = [
RayleighBenardDedalusComparison(),
@@ -358,31 +384,26 @@ def plot_scalings(problem, **kwargs): # pragma: no cover
('RBC', 'time'): {'x': [1 / 10, 64], 'y': [60, 60 / 640]},
('RBC', 'time_per_task'): {'x': [1, 640], 'y': [60, 60 / 640]},
('RBC', 'min_time_per_task'): {'x': [1, 640], 'y': [60, 60 / 640]},
('RBC', 'min_time'): {'x': [1, 640], 'y': [60, 60 / 640]},
('RBC', 'throughput_per_task'): {'x': [1 / 1, 640], 'y': [2e4, 2e4 * 640]},
}

fig, ax = plt.subplots(figsize=figsize_by_journal('TUHH_thesis', 1, 0.6))
configs[1].plot_scaling_test(ax=ax, quantity='efficiency')
# ax.legend(frameon=False)
box = ax.get_position()
ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))

ax.set_yscale('linear')
path = f'{PROJECT_PATH}/plots/scaling_{problem}_efficiency.pdf'
fig.savefig(path, bbox_inches='tight')
print(f'Saved {path!r}', flush=True)

for quantity in ['time', 'throughput', 'time_per_task', 'throughput_per_task', 'min_time_per_task'][::-1]:
for quantity in ['time', 'throughput', 'time_per_task', 'throughput_per_task', 'min_time_per_task', 'efficiency'][
::-1
]:
fig, ax = plt.subplots(figsize=figsize_by_journal('TUHH_thesis', 1, 0.6))
for config in configs:
config.plot_scaling_test(ax=ax, quantity=quantity)
config.plot_scaling_test(ax=ax, quantity=quantity, space_time=space_time)
if (problem, quantity) in ideal_lines.keys():
ax.loglog(*ideal_lines[(problem, quantity)].values(), color='black', ls=':', label='ideal')
elif quantity == 'efficiency':
ax.axhline(1, color='black', ls=':', label='ideal')
ax.set_yscale('linear')
ax.set_ylim(0, 1.1)
box = ax.get_position()
ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
path = f'{PROJECT_PATH}/plots/scaling_{problem}_{quantity}.pdf'
path = f'{PROJECT_PATH}/plots/scaling_{problem}_{quantity}_{XPU}_{space_time}.pdf'
fig.savefig(path, bbox_inches='tight')
print(f'Saved {path!r}', flush=True)

@@ -393,8 +414,8 @@ def plot_scalings(problem, **kwargs): # pragma: no cover
parser = argparse.ArgumentParser()
parser.add_argument('--mode', type=str, choices=['run', 'plot'], default='run')
parser.add_argument('--problem', type=str, default='GS')
parser.add_argument('--XPU', type=str, choices=['CPU', 'GPU'], default='CPU')
parser.add_argument('--space_time', type=str, choices=['True', 'False'], default='False')
parser.add_argument('--XPU', type=str, choices=['CPU', 'GPU', 'both'], default='CPU')
parser.add_argument('--space_time', type=str, choices=['True', 'False', 'None'], default='False')
parser.add_argument('--submit', type=str, choices=['True', 'False'], default='True')
parser.add_argument('--nsys_profiling', type=str, choices=['True', 'False'], default='False')

@@ -403,6 +424,13 @@ def plot_scalings(problem, **kwargs): # pragma: no cover
submit = args.submit == 'True'
nsys_profiling = args.nsys_profiling == 'True'

if args.space_time == 'True':
space_time = True
elif args.space_time == 'False':
space_time = False
else:
space_time = None

config_classes = []

if args.problem == 'GS3D':
Expand All @@ -429,6 +457,6 @@ def plot_scalings(problem, **kwargs): # pragma: no cover
if args.mode == 'run':
config.run_scaling_test(submit=submit, nsys_profiling=nsys_profiling)
elif args.mode == 'plot':
plot_scalings(problem=args.problem)
plot_scalings(problem=args.problem, XPU=args.XPU, space_time=space_time)
else:
raise NotImplementedError(f'Don\'t know mode {args.mode!r}')
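For reference, the efficiency branch in plot_scaling_test above combines two normalizations: a fixed throughput reference for GrayScottScaling3D and a rescaling by the best measured point for RayleighBenard_scaling. Below is a minimal, self-contained sketch of that logic; the timing numbers, resolution, and tasks_per_node are invented for illustration and are not part of this changeset.

res, ndim, tasks_per_node = 256, 3, 4  # invented example values

def sketch_efficiency(timings, config_name):
    """Mimic the two normalization paths of plot_scaling_test (illustration only)."""
    out = {}
    for procs, t_mean in timings.items():
        # GrayScottScaling3D: divide the per-task throughput by a fixed reference value.
        norm = 13216322.909 if config_name == 'GrayScottScaling3D' else 1
        out[procs / tasks_per_node] = res**ndim / t_mean / procs / norm
    if config_name == 'RayleighBenard_scaling':
        # RayleighBenard_scaling: rescale by the best measured point, so the maximum is 1.
        best = max(out.values())
        out = {key: value / best for key, value in out.items()}
    return out

# invented mean step times in seconds, keyed by number of tasks
print(sketch_efficiency({1: 60.0, 4: 16.0, 16: 4.5, 64: 1.4}, 'RayleighBenard_scaling'))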
8 changes: 6 additions & 2 deletions pySDC/projects/GPU/paper_plots.py
@@ -54,15 +54,19 @@ def plot_scalings_separately(problem, journal='TUHH_thesis', **kwargs): # pragma: no cover

def make_plots_for_thesis(): # pragma: no cover
from pySDC.projects.GPU.analysis_scripts.plot_RBC_matrix import plot_DCT, plot_preconditioners, plot_ultraspherical
from pySDC.projects.GPU.analysis_scripts.parallel_scaling import plot_scalings

# small plots with no simulations
plot_DCT()
plot_preconditioners()
plot_ultraspherical()

# plot space-time parallel scaling
for problem in ['GS3D', 'RBC']:
plot_scalings_separately(problem=problem)
plot_scalings(problem='GS3D', XPU='both', space_time=False)
plot_scalings(problem='GS3D', XPU='GPU', space_time=None)
plot_scalings(problem='RBC', XPU='both', space_time=False)
plot_scalings(problem='RBC', XPU='GPU', space_time=None)
plot_scalings(problem='RBC', XPU='CPU', space_time=None)


if __name__ == '__main__':
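Given the calls above and the new path pattern 'scaling_{problem}_{quantity}_{XPU}_{space_time}.pdf' introduced in parallel_scaling.py, the per-quantity loop in plot_scalings would write files named as sketched below (illustration only, not part of the changeset):

calls = [
    ('GS3D', 'both', False),
    ('GS3D', 'GPU', None),
    ('RBC', 'both', False),
    ('RBC', 'GPU', None),
    ('RBC', 'CPU', None),
]
quantities = ['time', 'throughput', 'time_per_task', 'throughput_per_task', 'min_time_per_task', 'efficiency']
for problem, XPU, space_time in calls:
    for quantity in quantities:
        print(f'scaling_{problem}_{quantity}_{XPU}_{space_time}.pdf')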
1 change: 1 addition & 0 deletions pySDC/projects/Resilience/paper_plots.py
@@ -813,6 +813,7 @@ def plot_recovery_rate_per_acceptance_threshold(problem, target='resilience'):
else:
fig, ax = plt.subplots(figsize=figsize_by_journal(JOURNAL, 0.8, 0.5))

ax.axvline(1.1, color='grey', ls=':', label='1.1')
stats_analyser.plot_recovery_thresholds(thresh_range=np.logspace(-1, 4, 500), recoverable_only=False, ax=ax)
ax.set_xscale('log')
ax.set_ylim((-0.05, 1.05))
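The added axvline marks the 1.1 acceptance threshold on the log-scaled threshold axis of the recovery-rate plot. A minimal stand-alone illustration with invented data (not part of the changeset):

import matplotlib.pyplot as plt
import numpy as np

fig, ax = plt.subplots()
thresholds = np.logspace(-1, 4, 500)
ax.plot(thresholds, 1 - np.exp(-1 / thresholds))  # placeholder recovery-rate curve
ax.axvline(1.1, color='grey', ls=':', label='1.1')  # reference line as added above
ax.set_xscale('log')
ax.set_ylim((-0.05, 1.05))
ax.legend(frameon=False)
plt.show()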