import os

import reframe as rfm
import reframe.utility.sanity as sn


12- class NamdBaseCheck (rfm .RunOnlyRegressionTest ):
13- def __init__ (self , arch , scale , variant ):
14- self .descr = f'NAMD check ({ arch } , { variant } )'
15- if self .current_system .name in ['eiger' , 'pilatus' ]:
16- self .valid_prog_environs = ['cpeGNU' ]
17- else :
18- self .valid_prog_environs = ['builtin' ]
12+ @rfm .simple_test
13+ class NamdCheck (rfm .RunOnlyRegressionTest ):
14+ scale = parameter (['small' , 'large' ])
15+ variant = parameter (['maint' , 'prod' ])
16+ arch = parameter (['gpu' , 'cpu' ])
1917
20- self .modules = ['NAMD' ]
18+ valid_prog_environs = ['builtin' , 'cpeIntel' ]
19+ modules = ['NAMD' ]
20+ executable = 'namd2'
21+ use_multithreading = True
22+ num_tasks_per_core = 2
23+ maintainers = ['CB' , 'LM' ]
24+ tags = {'scs' , 'external-resources' }
25+ extra_resources = {
26+ 'switches' : {
27+ 'num_switches' : 1
28+ }
29+ }
2130
22- # Reset sources dir relative to the SCS apps prefix
23- self .sourcesdir = os .path .join (self .current_system .resourcesdir ,
24- 'NAMD' , 'prod' )
25- self .executable = 'namd2'
26- self .use_multithreading = True
27- self .num_tasks_per_core = 2
31+ @run_after ('init' )
32+ def adapt_description (self ):
33+ self .descr = f'NAMD check ({ self .arch } , { self .variant } )'
34+ self .tags |= {
35+ 'maintenance' if self .variant == 'maint' else 'production'
36+ }
37+
38+ @run_after ('init' )
39+ def adapt_valid_systems (self ):
40+ if self .arch == 'gpu' :
41+ self .valid_systems = ['daint:gpu' ]
42+ if self .scale == 'small' :
43+ self .valid_systems += ['dom:gpu' ]
44+ else :
45+ self .valid_systems = ['daint:mc' , 'eiger:mc' , 'pilatus:mc' ]
46+ if self .scale == 'small' :
47+ self .valid_systems += ['dom:mc' ]
2848
29- if scale == 'small' :
49+ @run_after ('init' )
50+ def adapt_valid_prog_environs (self ):
51+ if self .current_system .name == 'pilatus' :
52+ self .valid_prog_environs .remove ('builtin' )
53+ else :
54+ self .valid_prog_environs .remove ('cpeIntel' )
55+
56+ @run_after ('init' )
57+ def setup_parallel_run (self ):
58+ if self .arch == 'gpu' :
59+ self .executable_opts = ['+idlepoll' , '+ppn 23' , 'stmv.namd' ]
60+ self .num_cpus_per_task = 24
61+ self .num_gpus_per_node = 1
62+ else :
63+ # On Eiger a no-smp NAMD version is the default
64+ if self .current_system .name in ['eiger' , 'pilatus' ]:
65+ self .executable_opts = ['+idlepoll' , 'stmv.namd' ]
66+ else :
67+ self .executable_opts = ['+idlepoll' , '+ppn 71' , 'stmv.namd' ]
68+ self .num_cpus_per_task = 72
69+ if self .scale == 'small' :
3070 # On Eiger a no-smp NAMD version is the default
3171 if self .current_system .name in ['eiger' , 'pilatus' ]:
3272 self .num_tasks = 768
@@ -42,85 +82,56 @@ def __init__(self, arch, scale, variant):
4282 self .num_tasks = 16
4383 self .num_tasks_per_node = 1
4484
85+ @run_before ('compile' )
86+ def prepare_build (self ):
87+ # Reset sources dir relative to the SCS apps prefix
88+ self .sourcesdir = os .path .join (self .current_system .resourcesdir ,
89+ 'NAMD' , 'prod' )
90+
91+ @sanity_function
92+ def validate_energy (self ):
4593 energy = sn .avg (sn .extractall (
4694 r'ENERGY:([ \t]+\S+){10}[ \t]+(?P<energy>\S+)' ,
4795 self .stdout , 'energy' , float )
4896 )
4997 energy_reference = - 2451359.5
5098 energy_diff = sn .abs (energy - energy_reference )
51- self . sanity_patterns = sn .all ([
99+ return sn .all ([
52100 sn .assert_eq (sn .count (sn .extractall (
53101 r'TIMING: (?P<step_num>\S+) CPU:' ,
54102 self .stdout , 'step_num' )), 50 ),
55103 sn .assert_lt (energy_diff , 2720 )
56104 ])
57105
106+ @run_before ('performance' )
107+ def setup_perf_vars (self ):
58108 self .perf_patterns = {
59109 'days_ns' : sn .avg (sn .extractall (
60110 r'Info: Benchmark time: \S+ CPUs \S+ '
61111 r's/step (?P<days_ns>\S+) days/ns \S+ MB memory' ,
62112 self .stdout , 'days_ns' , float ))
63113 }
64-
65- self .maintainers = ['CB' , 'LM' ]
66- self .tags = {'scs' , 'external-resources' }
67- self .extra_resources = {
68- 'switches' : {
69- 'num_switches' : 1
70- }
71- }
72-
73-
74- @rfm .parameterized_test (* ([s , v ]
75- for s in ['small' , 'large' ]
76- for v in ['maint' , 'prod' ]))
77- class NamdGPUCheck (NamdBaseCheck ):
78- def __init__ (self , scale , variant ):
79- super ().__init__ ('gpu' , scale , variant )
80- self .valid_systems = ['daint:gpu' ]
81- self .executable_opts = ['+idlepoll' , '+ppn 23' , 'stmv.namd' ]
82- self .num_cpus_per_task = 24
83- self .num_gpus_per_node = 1
84- self .tags |= {'maintenance' if variant == 'maint' else 'production' }
85- if scale == 'small' :
86- self .valid_systems += ['dom:gpu' ]
87- self .reference = {
88- 'dom:gpu' : {'days_ns' : (0.15 , None , 0.05 , 'days/ns' )},
89- 'daint:gpu' : {'days_ns' : (0.15 , None , 0.05 , 'days/ns' )}
90- }
91- else :
92- self .reference = {
93- 'daint:gpu' : {'days_ns' : (0.07 , None , 0.05 , 'days/ns' )}
94- }
95-
96-
97- @rfm .parameterized_test (* ([s , v ]
98- for s in ['small' , 'large' ]
99- for v in ['maint' , 'prod' ]))
100- class NamdCPUCheck (NamdBaseCheck ):
101- def __init__ (self , scale , variant ):
102- super ().__init__ ('cpu' , scale , variant )
103- self .valid_systems = ['daint:mc' , 'eiger:mc' , 'pilatus:mc' ]
104- # On Eiger a no-smp NAMD version is the default
105- if self .current_system .name in ['eiger' , 'pilatus' ]:
106- self .executable_opts = ['+idlepoll' , 'stmv.namd' ]
107- self .num_tasks_per_core = 2
108- else :
109- self .executable_opts = ['+idlepoll' , '+ppn 71' , 'stmv.namd' ]
110- self .num_cpus_per_task = 72
111- if scale == 'small' :
112- self .valid_systems += ['dom:mc' ]
113- self .reference = {
114- 'dom:mc' : {'days_ns' : (0.51 , None , 0.05 , 'days/ns' )},
115- 'daint:mc' : {'days_ns' : (0.51 , None , 0.05 , 'days/ns' )},
116- 'eiger:mc' : {'days_ns' : (0.12 , None , 0.05 , 'days/ns' )},
117- 'pilatus:mc' : {'days_ns' : (0.12 , None , 0.05 , 'days/ns' )},
118- }
114+ if self .arch == 'gpu' :
115+ if self .scale == 'small' :
116+ self .reference = {
117+ 'dom:gpu' : {'days_ns' : (0.15 , None , 0.05 , 'days/ns' )},
118+ 'daint:gpu' : {'days_ns' : (0.15 , None , 0.05 , 'days/ns' )}
119+ }
120+ else :
121+ self .reference = {
122+ 'daint:gpu' : {'days_ns' : (0.07 , None , 0.05 , 'days/ns' )}
123+ }
119124 else :
120- self .reference = {
121- 'daint:mc' : {'days_ns' : (0.28 , None , 0.05 , 'days/ns' )},
122- 'eiger:mc' : {'days_ns' : (0.05 , None , 0.05 , 'days/ns' )},
123- 'pilatus:mc' : {'days_ns' : (0.05 , None , 0.05 , 'days/ns' )}
124- }
125-
126- self .tags |= {'maintenance' if variant == 'maint' else 'production' }
125+ if self .scale == 'small' :
126+ self .reference = {
127+ 'dom:mc' : {'days_ns' : (0.51 , None , 0.05 , 'days/ns' )},
128+ 'daint:mc' : {'days_ns' : (0.51 , None , 0.05 , 'days/ns' )},
129+ 'eiger:mc' : {'days_ns' : (0.12 , None , 0.05 , 'days/ns' )},
130+ 'pilatus:mc' : {'days_ns' : (0.12 , None , 0.05 , 'days/ns' )},
131+ }
132+ else :
133+ self .reference = {
134+ 'daint:mc' : {'days_ns' : (0.28 , None , 0.05 , 'days/ns' )},
135+ 'eiger:mc' : {'days_ns' : (0.05 , None , 0.05 , 'days/ns' )},
136+ 'pilatus:mc' : {'days_ns' : (0.05 , None , 0.05 , 'days/ns' )}
137+ }