66"""
77
88import json
9+ from datetime import datetime , timezone
910import time
1011import traceback
1112from collections import Counter
@@ -59,18 +60,22 @@ def _summarize_tasks(task_details: list[dict[str, Any]]) -> TaskSummary:
5960 waiting = counter .get ("waiting" , 0 ),
6061 in_progress = counter .get ("in_progress" , 0 ),
6162 completed = counter .get ("completed" , 0 ),
63+ pending = counter .get ("pending" , counter .get ("in_progress" , 0 )),
6264 failed = counter .get ("failed" , 0 ),
6365 cancelled = counter .get ("cancelled" , 0 ),
6466 total = total ,
6567 )
6668
67- def _aggregate_counts_from_redis (tracker : TaskStatusTracker ) -> TaskSummary | None :
69+ def _aggregate_counts_from_redis (
70+ tracker : TaskStatusTracker , max_age_seconds : float = 86400
71+ ) -> TaskSummary | None :
6872 """Stream status counts directly from Redis to avoid loading all task payloads."""
6973 redis_client = getattr (tracker , "redis" , None )
7074 if not redis_client :
7175 return None
7276
7377 counter = Counter ()
78+ now = datetime .now (timezone .utc ).timestamp ()
7479
7580 # Scan task_meta keys, then hscan each hash in batches
7681 cursor : int | str = 0
@@ -83,6 +88,16 @@ def _aggregate_counts_from_redis(tracker: TaskStatusTracker) -> TaskSummary | No
8388 for value in fields .values ():
8489 try :
8590 payload = json .loads (value .decode ("utf-8" ) if isinstance (value , bytes ) else value )
91+ # Skip stale entries to reduce noise and load
92+ ts = payload .get ("submitted_at" ) or payload .get ("started_at" )
93+ if ts :
94+ try :
95+ ts_dt = datetime .fromisoformat (ts )
96+ ts_seconds = ts_dt .timestamp ()
97+ except Exception :
98+ ts_seconds = None
99+ if ts_seconds and (now - ts_seconds ) > max_age_seconds :
100+ continue
86101 status = payload .get ("status" )
87102 if status :
88103 counter [status ] += 1
@@ -101,6 +116,7 @@ def _aggregate_counts_from_redis(tracker: TaskStatusTracker) -> TaskSummary | No
101116 waiting = counter .get ("waiting" , 0 ),
102117 in_progress = counter .get ("in_progress" , 0 ),
103118 completed = counter .get ("completed" , 0 ),
119+ pending = counter .get ("pending" , counter .get ("in_progress" , 0 )),
104120 failed = counter .get ("failed" , 0 ),
105121 cancelled = counter .get ("cancelled" , 0 ),
106122 total = total ,
@@ -120,6 +136,7 @@ def _aggregate_counts_from_redis(tracker: TaskStatusTracker) -> TaskSummary | No
120136 # Scheduler view: assume tracker contains scheduler tasks; overlay queue monitor for live queue depth
121137 sched_waiting = all_tasks_summary .waiting
122138 sched_in_progress = all_tasks_summary .in_progress
139+ sched_pending = all_tasks_summary .pending
123140 sched_completed = all_tasks_summary .completed
124141 sched_failed = all_tasks_summary .failed
125142 sched_cancelled = all_tasks_summary .cancelled
@@ -129,17 +146,21 @@ def _aggregate_counts_from_redis(tracker: TaskStatusTracker) -> TaskSummary | No
129146 queue_status_data = mem_scheduler .task_schedule_monitor .get_tasks_status () or {}
130147 scheduler_waiting = 0
131148 scheduler_in_progress = 0
149+ scheduler_pending = 0
132150 for key , value in queue_status_data .items ():
133151 if not key .startswith ("scheduler:" ):
134152 continue
135153 scheduler_in_progress += int (value .get ("running" , 0 ) or 0 )
154+ scheduler_pending += int (value .get ("pending" , value .get ("running" , 0 )) or 0 )
136155 scheduler_waiting += int (value .get ("remaining" , 0 ) or 0 )
137156 sched_waiting = scheduler_waiting
138157 sched_in_progress = scheduler_in_progress
158+ sched_pending = scheduler_pending
139159
140160 scheduler_summary = TaskSummary (
141161 waiting = sched_waiting ,
142162 in_progress = sched_in_progress ,
163+ pending = sched_pending ,
143164 completed = sched_completed ,
144165 failed = sched_failed ,
145166 cancelled = sched_cancelled ,
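A note on the stale-entry filter: `datetime.fromisoformat` rejects the trailing "Z" form of ISO-8601 before Python 3.11, so such timestamps land in the `except` branch and those entries are never filtered; and a naive (offset-free) timestamp is interpreted as local time by `.timestamp()`, which skews the age check by the server's UTC offset. A minimal sketch of a stricter parser, assuming the stored timestamps are ISO-8601 strings and treating naive values as UTC; `parse_ts_utc` is a hypothetical helper, not part of this commit:

from datetime import datetime, timezone


def parse_ts_utc(ts: str) -> float | None:
    # Tolerate the trailing "Z", which datetime.fromisoformat only
    # accepts from Python 3.11 onwards.
    try:
        dt = datetime.fromisoformat(ts.replace("Z", "+00:00"))
    except ValueError:
        return None
    if dt.tzinfo is None:
        # Assumption: naive stamps were written in UTC; without this,
        # .timestamp() would interpret them in the local time zone.
        dt = dt.replace(tzinfo=timezone.utc)
    return dt.timestamp()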
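On the new `pending` field: `counter.get("pending", counter.get("in_progress", 0))` only reaches the fallback when no task reported a `pending` status at all, in which case the summary reuses the `in_progress` count instead of zero. A self-contained sketch of that behavior, with a stand-in `TaskSummary` dataclass (the real model is defined elsewhere in this codebase):

from collections import Counter
from dataclasses import dataclass


@dataclass
class TaskSummary:
    waiting: int = 0
    in_progress: int = 0
    completed: int = 0
    pending: int = 0
    failed: int = 0
    cancelled: int = 0
    total: int = 0


tasks = [{"status": "in_progress"}, {"status": "completed"}, {"status": "in_progress"}]
counter = Counter(t["status"] for t in tasks)
summary = TaskSummary(
    in_progress=counter.get("in_progress", 0),
    # No "pending" status was seen, so this falls back to the in_progress count (2)
    pending=counter.get("pending", counter.get("in_progress", 0)),
    total=sum(counter.values()),
)
assert summary.pending == 2 and summary.total == 3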