@@ -178,38 +178,28 @@ def rebuild_jobs_index(self):
                     continue
                 # Prefer the latest snapshot if available; fall back to index.json
                 index_file = os.path.join(entry_path, "index.json")
-                latest_txt = os.path.join(entry_path, "latest.txt")
                 try:
-                    with open(latest_txt, "r", encoding="utf-8") as lf:
-                        latest_name = lf.read().strip()
-                        candidate = os.path.join(entry_path, latest_name)
-                        if os.path.isfile(candidate):
-                            index_file = candidate
-                except Exception:
-                    pass
-                if not os.path.isfile(index_file):
+                    with open(index_file, "r", encoding="utf-8") as lf:
+                        data = json.load(lf)
+                except Exception as e:
+                    print(f"Error loading index.json: {e}")
                     continue
-
-                # Check the metadata to see if it belongs to this experiment
-                # Also check for a type parameter, then add to index
-                try:
-                    with open(index_file, "r") as jf:
-                        data = json.load(jf)
-                    if data.get("experiment_id", "") != self.id:
-                        continue
-                    job_type = data.get("type", "UNKNOWN")
-                    results.setdefault(job_type, []).append(entry)
-                except Exception:
+                if data.get("experiment_id", "") != self.id:
+                    print(f"Experiment ID mismatch for job {entry}: {data.get('experiment_id', '')} != {self.id}")
                     continue
+                job_type = data.get("type", "UNKNOWN")
+                results.setdefault(job_type, []).append(entry)
 
             # Write discovered index to jobs.json
             if results:
                 try:
                     with open(self._jobs_json_file(), "w") as out:
                         json.dump(results, out, indent=4)
-                except Exception:
+                except Exception as e:
+                    print(f"Error writing jobs index: {e}")
                     pass
-        except Exception:
+        except Exception as e:
+            print(f"Error rebuilding jobs index: {e}")
             pass
         print(results)
 
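After this change the discovery loop depends only on each job folder's `index.json` (which must carry `experiment_id` and `type`); the `latest.txt` snapshot indirection is removed, and failures are printed instead of silently swallowed. Below is a minimal standalone sketch of that behaviour, not the class itself; the directory layout and example values (`jobs_dir`, `job-001`, `exp-42`, `TRAIN`) are hypothetical and not taken from the repository.

```python
import json
import os
import tempfile

# Hypothetical jobs directory with a single job folder.
jobs_dir = tempfile.mkdtemp()
job_dir = os.path.join(jobs_dir, "job-001")
os.makedirs(job_dir)

# The rebuilt loop reads only <job>/index.json, so that is the only file created.
with open(os.path.join(job_dir, "index.json"), "w", encoding="utf-8") as f:
    json.dump({"experiment_id": "exp-42", "type": "TRAIN"}, f)

# Core of the new loop, pulled out of the experiment class for illustration.
experiment_id = "exp-42"
results = {}
for entry in os.listdir(jobs_dir):
    index_file = os.path.join(jobs_dir, entry, "index.json")
    try:
        with open(index_file, "r", encoding="utf-8") as lf:
            data = json.load(lf)
    except Exception as e:
        print(f"Error loading index.json: {e}")
        continue
    if data.get("experiment_id", "") != experiment_id:
        continue
    results.setdefault(data.get("type", "UNKNOWN"), []).append(entry)

print(results)  # -> {'TRAIN': ['job-001']}
```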