@@ -30,7 +30,7 @@ def mangle(self) -> str:
 
 
 class Run(BaseModel):
-    scramble_id: int
+    file: int
     logic: defs.Logic
     cputime_s: float
     """example: 0.211880968s"""
@@ -110,9 +110,9 @@ def parse_result(s: str) -> defs.Answer:
             return defs.Answer.Unsat
         case "true":
             return defs.Answer.Sat
-        case "unknown":
+        case "unknown" | "ERROR":
             return defs.Answer.Unknown
-        case "OUT OF MEMORY" | "OUT OF JAVA MEMORY":
+        case "OUT OF MEMORY" | "OUT OF JAVA MEMORY" | "KILLED BY SIGNAL 9":
             return defs.Answer.OOM
         case _:
             raise ValueError(f"Unknown result value {s}")
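For reference, a minimal self-contained sketch of the status mapping after this change. `Answer` below is a stand-in for `smtcomp.defs.Answer` (only the members used here are reproduced), and the `"false"` pattern for the Unsat case is assumed, since that case line sits outside the hunk:

```python
from enum import Enum, auto

class Answer(Enum):
    Sat = auto()
    Unsat = auto()
    Unknown = auto()
    OOM = auto()

def parse_result(s: str) -> Answer:
    match s:
        case "false":  # assumed pattern; the original case line is outside this hunk
            return Answer.Unsat
        case "true":
            return Answer.Sat
        case "unknown" | "ERROR":  # BenchExec "ERROR" statuses now count as Unknown
            return Answer.Unknown
        case "OUT OF MEMORY" | "OUT OF JAVA MEMORY" | "KILLED BY SIGNAL 9":
            return Answer.OOM  # a run killed by SIGKILL is classified as out of memory
        case _:
            raise ValueError(f"Unknown result value {s}")

assert parse_result("ERROR") is Answer.Unknown
assert parse_result("KILLED BY SIGNAL 9") is Answer.OOM
```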
@@ -121,7 +121,8 @@ def parse_result(s: str) -> defs.Answer:
 def convert_run(r: ET.Element) -> Run:
     parts = r.attrib["name"].split("/")
     logic = defs.Logic(parts[-2])
-    scramble_id = smtcomp.scramble_benchmarks.unscramble_yml_basename(parts[-1])
+    benchmark_yml = parts[-1]
+    benchmark_file = int(benchmark_yml.split("_", 1)[0])
     cputime_s: Optional[float] = None
     memory_B: Optional[int] = None
     answer: Optional[defs.Answer] = None
@@ -143,7 +144,7 @@ def convert_run(r: ET.Element) -> Run:
         raise ValueError("xml of results doesn't contains some expected column")
 
     return Run(
-        scramble_id=scramble_id,
+        file=benchmark_file,
        logic=logic,
         cputime_s=cputime_s,
         memory_B=memory_B,
@@ -377,7 +378,7 @@ def parse_mapping(p: Path) -> pl.LazyFrame:
 
 def parse_dir(dir: Path) -> pl.LazyFrame:
     """
-    output columns: solver, participation, track, basename, cputime_s, memory_B, status, walltime_s, scramble_id, file
+    output columns: solver, participation, track, basename, cputime_s, memory_B, status, walltime_s, file
 
     The track stored in the results is *not* used for some decisions:
     - if a file mapping.json is present it used and the original_id.csv is not needed
@@ -386,27 +387,20 @@ def parse_dir(dir: Path) -> pl.LazyFrame:
 
     TODO: streamline the results directory hierarchy
     """
-    csv = dir / smtcomp.scramble_benchmarks.csv_original_id_name
-    json = dir / json_mapping_name
-    if not csv.exists() and not json.exists():
-        raise (ValueError(f"No file {csv!s} or {json!s} in the directory"))
-
-    if csv.exists():
-        lf = pl.read_csv(csv).lazy()
-        defaults: dict[str, Any] = {"file": -1}
-    else:
-        lf = parse_mapping(json)
-        defaults = {"unsat_core": [], "scramble_id_orig": -1}
-
     l = list(dir.glob("**/*.xml.bz2"))
     if len(l) == 0:
         raise (ValueError(f"No results in the directory {dir!s}"))
     l_parsed = list(track(map(parse_to_pl, l), total=len(l)))
     results = pl.concat(l_parsed)
-    results = add_columns(results, lf, on=["scramble_id"], defaults=defaults)
 
     ucvr = dir / "../unsat_core_validation_results" / "parsed.feather"
     if (dir.name).endswith("unsatcore"):
+        json = dir / json_mapping_name
+        if not json.exists():
+            raise (ValueError(f"No file {json!s} in the directory"))
+        lf = parse_mapping(json)
+        defaults = {"unsat_core": [], "scramble_id_orig": -1}
+
         if ucvr.is_file():
             vr = pl.read_ipc(ucvr).lazy()
             vr = (
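With the mapping lookup moved into the unsat-core branch, an ordinary results directory only needs the BenchExec `*.xml.bz2` archives. A hypothetical invocation (the directory path is made up for illustration):

```python
from pathlib import Path

# Hypothetical path; parse_dir scans **/*.xml.bz2 below it.
lf = parse_dir(Path("results/sq-2024"))
df = lf.collect()  # columns per the docstring: solver, ..., walltime_s, file
```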
@@ -466,7 +460,7 @@ def helper_get_results(
         )
     else:
         lf = pl.concat(pl.read_ipc(p / "parsed.feather").lazy() for p in results)
-        lf = lf.filter(track=int(track)).drop("scramble_id")
+        lf = lf.filter(track=int(track))
 
     selection = smtcomp.selection.helper(config, track).filter(selected=True).with_columns(track=int(track))
 
@@ -491,7 +485,6 @@ def helper_get_results(
             "cputime_s": 0,
             "memory_B": 0,
             "walltime_s": 0,
-            "nb_answers": 0,
         },
     )
 