|
4 | 4 | # SPDX-License-Identifier: BSD-3-Clause |
5 | 5 |
|
6 | 6 | import collections.abc |
| 7 | +import json |
| 8 | +import jsonschema |
| 9 | +import os |
7 | 10 | import re |
| 11 | +import tempfile |
8 | 12 |
|
| 13 | +import reframe |
9 | 14 | import reframe.core.debug as debug |
10 | 15 | import reframe.core.fields as fields |
11 | 16 | import reframe.utility as util |
12 | 17 | import reframe.utility.os_ext as os_ext |
13 | 18 | import reframe.utility.typecheck as types |
14 | | -from reframe.core.exceptions import (ConfigError, ReframeFatalError) |
| 19 | +from reframe.core.exceptions import (ConfigError, ReframeError, |
| 20 | + ReframeFatalError) |
15 | 21 |
|
16 | 22 |
|
17 | 23 | _settings = None |
@@ -222,3 +228,147 @@ def create_env(system, partition, name): |
222 | 228 | system.add_partition(part) |
223 | 229 |
|
224 | 230 | self._systems[sys_name] = system |
| 231 | + |
| 232 | + |
def _name_value_pairs(variables):
    # Old-style 'variables' dictionaries become lists of [name, value]
    # pairs in the new configuration format.
    return [[vname, v] for vname, v in variables.items()]


def _convert_partition(name, specs):
    # Convert a single old-style partition entry to the new format.
    new_p = {'name': name}
    new_p.update(specs)

    # The old combined scheduler spec is split into separate
    # 'scheduler' and 'launcher' entries.
    if specs['scheduler'] == 'nativeslurm':
        new_p['scheduler'] = 'slurm'
        new_p['launcher'] = 'srun'
    elif specs['scheduler'] == 'local':
        new_p['scheduler'] = 'local'
        new_p['launcher'] = 'local'
    else:
        # NOTE(review): a scheduler spec without a '+' separator raises
        # ValueError here, exactly as in the original code — presumably
        # old configs always used the 'sched+launcher' form; confirm.
        sched, launch, *_ = specs['scheduler'].split('+')
        new_p['scheduler'] = sched
        new_p['launcher'] = launch

    # Make resources dictionary into a list
    if 'resources' in specs:
        new_p['resources'] = [
            {'name': rname, 'options': r}
            for rname, r in specs['resources'].items()
        ]

    # Make variables dictionary into a list of lists
    if 'variables' in specs:
        new_p['variables'] = _name_value_pairs(specs['variables'])

    # Make container platforms dictionary into a list
    if 'container_platforms' in specs:
        new_p['container_platforms'] = []
        for cname, c in specs['container_platforms'].items():
            new_c = {'name': cname}
            new_c.update(c)
            if 'variables' in c:
                new_c['variables'] = _name_value_pairs(c['variables'])

            new_p['container_platforms'].append(new_c)

    return new_p


def _convert_system(name, specs):
    # Convert a single old-style system entry to the new format.
    sys_dict = {'name': name}
    sys_dict.update(specs)

    # Make variables dictionary into a list of lists
    if 'variables' in specs:
        sys_dict['variables'] = _name_value_pairs(sys_dict['variables'])

    # Make partitions dictionary into a list
    if 'partitions' in specs:
        sys_dict['partitions'] = [
            _convert_partition(pname, p)
            for pname, p in specs['partitions'].items()
        ]

    return sys_dict


def _convert_environ(name, specs, target):
    # Convert a single old-style environment entry to the new format.
    new_env = {'name': name}
    if target != '*':
        new_env['target_systems'] = [target]

    new_env.update(specs)

    # Convert variables dictionary to a list of lists
    if 'variables' in specs:
        new_env['variables'] = _name_value_pairs(specs['variables'])

    # Type attribute is not used anymore
    new_env.pop('type', None)
    return new_env


def _convert_logging(old_log):
    # Convert an old-style logging configuration dictionary; log levels
    # are lowercased in the new format.
    #
    # FIX: copy each handler before modifying it; the original code
    # aliased the handler dicts and mutated the input configuration.
    new_handlers = [dict(h, level=h['level'].lower())
                    for h in old_log['handlers']]
    return {
        'level': old_log['level'].lower(),
        'handlers': new_handlers
    }


def convert_old_config(filename):
    """Convert an old-style (Python module) configuration file to the new
    single-dictionary format.

    :arg filename: The path of the old configuration file.
    :returns: The path of a temporary file holding the converted
        configuration; the caller is responsible for removing it.

    The converted configuration is validated against the JSON schema
    shipped with ReFrame before being written out; a validation failure
    here indicates a bug in the conversion logic and is let to propagate.
    """
    old_config = load_settings_from_file(filename)
    converted = {
        'systems': [],
        'environments': [],
        'logging': [],
        'perf_logging': [],
    }
    for sys_name, sys_specs in old_config.site_configuration['systems'].items():
        converted['systems'].append(_convert_system(sys_name, sys_specs))

    old_environs = old_config.site_configuration['environments'].items()
    for env_target, env_entries in old_environs:
        for ename, e in env_entries.items():
            converted['environments'].append(
                _convert_environ(ename, e, env_target)
            )

    if 'modes' in old_config.site_configuration:
        converted['modes'] = []
        old_modes = old_config.site_configuration['modes'].items()
        for target_mode, mode_entries in old_modes:
            for mname, m in mode_entries.items():
                new_mode = {'name': mname, 'options': m}
                if target_mode != '*':
                    new_mode['target_systems'] = [target_mode]

                converted['modes'].append(new_mode)

    converted['logging'].append(_convert_logging(old_config.logging_config))
    converted['perf_logging'].append(
        _convert_logging(old_config.perf_logging_config)
    )

    # Only emit a 'general' section if any of its options are present
    general = {}
    if hasattr(old_config, 'checks_path'):
        general['check_search_path'] = old_config.checks_path

    if hasattr(old_config, 'checks_path_recurse'):
        general['check_search_recursive'] = old_config.checks_path_recurse

    if general:
        converted['general'] = [general]

    # Validate the converted file
    schema_filename = os.path.join(reframe.INSTALL_PREFIX,
                                   'schemas', 'config.json')

    # We let the following statements raise, because if they do, that's a BUG
    with open(schema_filename) as fp:
        schema = json.loads(fp.read())

    jsonschema.validate(converted, schema)
    with tempfile.NamedTemporaryFile(mode='w', delete=False) as fp:
        # FIX: interpolate the actual source filename; the original wrote
        # a literal placeholder into the generated header.
        fp.write(f"#\n# This file was automatically generated "
                 f"by ReFrame based on '{filename}'.\n#\n\n")
        fp.write(f'site_configuration = {util.ppretty(converted)}\n')

    # NamedTemporaryFile(delete=False) keeps the file on disk after the
    # 'with' block, so its name remains valid for the caller.
    return fp.name
0 commit comments