@@ -4,7 +4,7 @@
 from collections import defaultdict
 from dataclasses import dataclass
 from datetime import datetime, timedelta, timezone
-from typing import Any, DefaultDict, NamedTuple
+from typing import Any, DefaultDict, NamedTuple, NotRequired, TypedDict
 
 import sentry_sdk
 from celery import Task
@@ -50,7 +50,6 @@
 from sentry.taskworker.namespaces import issues_tasks
 from sentry.taskworker.retry import Retry
 from sentry.utils import json, metrics
-from sentry.utils.dates import ensure_aware
 from sentry.utils.iterators import chunked
 from sentry.utils.lazy_service_wrapper import LazyServiceWrapper
 from sentry.utils.retries import ConditionalRetryPolicy, exponential_delay
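Note: the new typing imports back the EventData definition below. NotRequired marks TypedDict keys that may be absent entirely, which is distinct from a key that is present with the value None. A minimal standalone sketch of that distinction (the Example name is illustrative, not part of the change):

    from datetime import datetime
    from typing import NotRequired, TypedDict

    class Example(TypedDict):
        event_id: str                                  # key must always be present
        start_timestamp: NotRequired[datetime | None]  # key may be missing, or present as None

    ok: Example = {"event_id": "abc123"}  # valid even though start_timestamp is omitted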
@@ -243,25 +242,48 @@ def bulk_fetch_events(event_ids: list[str], project_id: int) -> dict[str, Event]
     }
 
 
+class EventData(TypedDict):
+    event_id: str
+    occurrence_id: NotRequired[str | None]
+    start_timestamp: NotRequired[datetime | None]
+
+
 def parse_rulegroup_to_event_data(
     rulegroup_to_event_data: dict[str, str],
-) -> dict[tuple[int, int], dict[str, str]]:
+) -> dict[tuple[int, int], EventData]:
     parsed_rulegroup_to_event_data = {}
     for rule_group, instance_data in rulegroup_to_event_data.items():
         event_data = json.loads(instance_data)
+        if ts_string := event_data.get("start_timestamp"):
+            try:
+                # Handle ISO format with timezone info
+                event_data["start_timestamp"] = datetime.fromisoformat(ts_string)
+            except (ValueError, TypeError):
+                try:
+                    # Fall back to manual parsing if needed
+                    event_data["start_timestamp"] = datetime.strptime(
+                        ts_string, "%Y-%m-%dT%H:%M:%S.%fZ"
+                    ).replace(tzinfo=timezone.utc)
+                except ValueError:
+                    logger.exception(
+                        "delayed_processing.invalid_start_timestamp",
+                        extra={"rule_group": rule_group, "start_timestamp": ts_string},
+                    )
+                    del event_data["start_timestamp"]
+
         rule_id, group_id = rule_group.split(":")
         parsed_rulegroup_to_event_data[(int(rule_id), int(group_id))] = event_data
     return parsed_rulegroup_to_event_data
 
 
 def build_group_to_groupevent(
     log_config: LogConfig,
-    parsed_rulegroup_to_event_data: dict[tuple[int, int], dict[str, str]],
+    parsed_rulegroup_to_event_data: dict[tuple[int, int], EventData],
     bulk_event_id_to_events: dict[str, Event],
     bulk_occurrence_id_to_occurrence: dict[str, IssueOccurrence],
     group_id_to_group: dict[int, Group],
     project_id: int,
-) -> dict[Group, GroupEvent]:
+) -> dict[Group, tuple[GroupEvent, datetime | None]]:
 
     project = fetch_project(project_id)
     if project:
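Note: the parsing added above tries datetime.fromisoformat first and only falls back to an explicit strptime for the trailing-"Z" UTC format, which fromisoformat accepts natively only from Python 3.11 onward. A minimal standalone sketch of that fallback, with a hypothetical helper name:

    from datetime import datetime, timezone

    def parse_start_timestamp(ts_string: str) -> datetime | None:
        # First attempt: ISO 8601 strings, which may carry their own offset.
        try:
            return datetime.fromisoformat(ts_string)
        except (ValueError, TypeError):
            pass
        # Fallback: "%Y-%m-%dT%H:%M:%S.%fZ" with UTC attached explicitly,
        # since strptime produces a naive datetime here.
        try:
            return datetime.strptime(ts_string, "%Y-%m-%dT%H:%M:%S.%fZ").replace(
                tzinfo=timezone.utc
            )
        except ValueError:
            return None  # the actual code logs and drops the value instead

    parse_start_timestamp("2024-05-01T12:00:00.123456Z")  # aware UTC datetime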
@@ -278,11 +300,12 @@ def build_group_to_groupevent(
             "project_id": project_id,
         },
     )
-    group_to_groupevent = {}
+    group_to_groupevent: dict[Group, tuple[GroupEvent, datetime | None]] = {}
 
     for rule_group, instance_data in parsed_rulegroup_to_event_data.items():
         event_id = instance_data.get("event_id")
         occurrence_id = instance_data.get("occurrence_id")
+        start_timestamp = instance_data.get("start_timestamp")
 
         if event_id is None:
             logger.info(
@@ -312,16 +335,16 @@ def build_group_to_groupevent(
         group_event = event.for_group(group)
         if occurrence_id:
             group_event.occurrence = bulk_occurrence_id_to_occurrence.get(occurrence_id)
-        group_to_groupevent[group] = group_event
+        group_to_groupevent[group] = (group_event, start_timestamp)
     return group_to_groupevent
 
 
 def get_group_to_groupevent(
     log_config: LogConfig,
-    parsed_rulegroup_to_event_data: dict[tuple[int, int], dict[str, str]],
+    parsed_rulegroup_to_event_data: dict[tuple[int, int], EventData],
     project_id: int,
     group_ids: set[int],
-) -> dict[Group, GroupEvent]:
+) -> dict[Group, tuple[GroupEvent, datetime | None]]:
     groups = Group.objects.filter(id__in=group_ids)
     group_id_to_group = {group.id: group for group in groups}
 
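Note: build_group_to_groupevent and get_group_to_groupevent now return (GroupEvent, datetime | None) tuples, so every caller has to unpack two values. A hedged sketch of the consuming pattern (variable names and the downstream call are illustrative):

    # group_to_groupevent: dict[Group, tuple[GroupEvent, datetime | None]]
    for group, (group_event, start_timestamp) in group_to_groupevent.items():
        if start_timestamp is not None:
            ...  # latency can be measured against the queued start time
        handle(group, group_event)  # hypothetical downstream call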
@@ -491,7 +514,7 @@ def get_rules_to_fire(
 def fire_rules(
     log_config: LogConfig,
     rules_to_fire: DefaultDict[Rule, set[int]],
-    parsed_rulegroup_to_event_data: dict[tuple[int, int], dict[str, str]],
+    parsed_rulegroup_to_event_data: dict[tuple[int, int], EventData],
     alert_rules: list[Rule],
     project: Project,
 ) -> None:
@@ -511,7 +534,8 @@ def fire_rules(
     )
     if log_config.num_events_issue_debugging or log_config.workflow_engine_process_workflows:
         serialized_groups = {
-            group.id: group_event.event_id for group, group_event in group_to_groupevent.items()
+            group.id: group_event.event_id
+            for group, (group_event, _) in group_to_groupevent.items()
         }
         logger.info(
             "delayed_processing.group_to_groupevent",
@@ -574,14 +598,13 @@ def fire_rules(
                 continue
 
             notification_uuid = str(uuid.uuid4())
-            groupevent = group_to_groupevent[group]
-            metrics.timing(
-                "rule_fire_history.latency",
-                (
-                    datetime.now(tz=timezone.utc) - ensure_aware(groupevent.datetime)
-                ).total_seconds(),
-                tags={"delayed": True, "group_type": group.issue_type.slug},
-            )
+            groupevent, start_timestamp = group_to_groupevent[group]
+            if start_timestamp:
+                metrics.timing(
+                    "rule_fire_history.latency",
+                    (datetime.now(tz=timezone.utc) - start_timestamp).total_seconds(),
+                    tags={"delayed": True, "group_type": group.issue_type.slug},
+                )
             rule_fire_history = history.record(
                 rule, group, groupevent.event_id, notification_uuid
             )
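Note: the latency metric is now derived from the start_timestamp captured when the event was queued, rather than from the event's own datetime (so ensure_aware is no longer needed), and it is skipped when no timestamp was recovered. A minimal sketch of the guarded computation, assuming an aware UTC start_timestamp and a hypothetical helper name:

    from datetime import datetime, timezone

    def latency_seconds(start_timestamp: datetime | None) -> float | None:
        # Subtraction requires both operands to be timezone-aware,
        # which is why parsing always attaches tzinfo upstream.
        if start_timestamp is None:
            return None
        return (datetime.now(tz=timezone.utc) - start_timestamp).total_seconds()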