@@ -228,18 +228,7 @@ def record_episode_sample(table_name: str, episode):
228 228             "response_len": episode.response_len,
229 229             "pad_id": episode.pad_id,
230 230         }
231     -
232     -        print(
233     -            "[DEBUG] Adding sample to table via record_metric, episode_id: ",
234     -            episode.episode_id,
235     -            # "episode: ",
236     -            # episode,
237     -        )
238 231         record_metric(table_name, sample, Reduce.SAMPLE)
239     -        print(
240     -            "[DEBUG] Added sample to table via record_metric, episode_id: ",
241     -            episode.episode_id,
242     -        )
243 232
244233
245234#################
@@ -789,7 +778,6 @@ async def flush(
789 778
790 779         for backend in self.per_rank_reduce_backends:
791 780             if scalar_metrics:
792     -                print(f"[DEBUG] calling log_batch from MetricCollector")
793 781                 await backend.log_batch(scalar_metrics, global_step)
794 782             if sample_metrics:
795 783                 await backend.log_samples(sample_metrics, global_step)
@@ -912,7 +900,6 @@ async def init(
912 900     async def log_batch(
913 901         self, metrics: list[Metric], global_step: int, *args, **kwargs
914 902     ) -> None:
915     -        print(f"[DEBUG] calling log_batch with {len(metrics)} metrics")
916 903         metrics_str = "\n".join(
917 904             f"  {metric.key}: {metric.value}"
918 905             for metric in sorted(metrics, key=lambda m: m.key)
@@ -931,12 +918,10 @@ async def log_samples(self, samples: Dict[str, List[dict]], step: int) -> None:
931 918         """Pretty-print sample-level logs to console."""
932 919         import json
933 920
934     -        print(f"[DEBUG] calling log_samples with {len(samples)} samples")
935     -
936 921         logger.info(f"========== SAMPLE LOGS STEP {step} ==========")
937 922         for table_name, table_rows in samples.items():
938 923             logger.info(f"[{table_name}] ({len(table_rows)} samples)")
939     -            logger.info(json.dumps(table_rows, indent=2, ensure_ascii=False))
    924 +            logger.info(json.dumps(table_rows))
940 925         logger.info("==============================================\n")
942927
0 commit comments