@@ -19,7 +19,6 @@
     Optional,
     Sequence,
     Tuple,
-    Type,
     TypedDict,
     Union,
     cast,
@@ -55,15 +54,8 @@
 from sentry.culprit import generate_culprit
 from sentry.dynamic_sampling import LatestReleaseBias, LatestReleaseParams
 from sentry.eventstore.processing import event_processing_store
-from sentry.eventtypes import (
-    CspEvent,
-    DefaultEvent,
-    ErrorEvent,
-    ExpectCTEvent,
-    ExpectStapleEvent,
-    HpkpEvent,
-    TransactionEvent,
-)
+from sentry.eventtypes import EventType
+from sentry.eventtypes.transaction import TransactionEvent
 from sentry.grouping.api import (
     BackgroundGroupingConfigLoader,
     GroupingConfig,
@@ -660,7 +652,7 @@ def save_error_events(
         with metrics.timer("event_manager.save_attachments"):
             save_attachments(cache_key, attachments, job)
 
-    metric_tags = {"from_relay": "_relay_processed" in job["data"]}
+    metric_tags = {"from_relay": str("_relay_processed" in job["data"])}
 
     metrics.timing(
         "events.latency",
@@ -1260,13 +1252,15 @@ def _tsdb_record_all_metrics(jobs: Sequence[Job]) -> None:
             records.append((TSDBModel.users_affected_by_project, project_id, (user.tag_value,)))
 
         if incrs:
-            tsdb.incr_multi(incrs, timestamp=event.datetime, environment_id=environment.id)
+            tsdb.backend.incr_multi(incrs, timestamp=event.datetime, environment_id=environment.id)
 
         if records:
-            tsdb.record_multi(records, timestamp=event.datetime, environment_id=environment.id)
+            tsdb.backend.record_multi(
+                records, timestamp=event.datetime, environment_id=environment.id
+            )
 
         if frequencies:
-            tsdb.record_frequency_multi(frequencies, timestamp=event.datetime)
+            tsdb.backend.record_frequency_multi(frequencies, timestamp=event.datetime)
 
 
 @metrics.wraps("save_event.nodestore_save_many")
@@ -1438,17 +1432,6 @@ def _get_event_user_impl(
     return euser
 
 
-EventType = Union[
-    DefaultEvent,
-    ErrorEvent,
-    CspEvent,
-    HpkpEvent,
-    ExpectCTEvent,
-    ExpectStapleEvent,
-    TransactionEvent,
-]
-
-
 def get_event_type(data: Mapping[str, Any]) -> EventType:
     return eventtypes.get(data.get("type", "default"))()
 
@@ -1922,7 +1905,7 @@ def _process_existing_aggregate(
     return bool(is_regression)
 
 
-Attachment = Type[CachedAttachment]
+Attachment = CachedAttachment
 
 
 def discard_event(job: Job, attachments: Sequence[Attachment]) -> None:
@@ -1938,7 +1921,7 @@ def discard_event(job: Job, attachments: Sequence[Attachment]) -> None:
 
     project = job["event"].project
 
-    quotas.refund(
+    quotas.backend.refund(
         project,
         key=job["project_key"],
         timestamp=job["start_time"],
@@ -1975,7 +1958,7 @@ def discard_event(job: Job, attachments: Sequence[Attachment]) -> None:
     )
 
     if attachment_quantity:
-        quotas.refund(
+        quotas.backend.refund(
             project,
             key=job["project_key"],
             timestamp=job["start_time"],
@@ -2099,7 +2082,7 @@ def filter_attachments_for_group(attachments: list[Attachment], job: Job) -> list[Attachment]:
         cache.set(crashreports_key, max_crashreports, CRASH_REPORT_TIMEOUT)
 
     if refund_quantity:
-        quotas.refund(
+        quotas.backend.refund(
             project,
             key=job["project_key"],
             timestamp=job["start_time"],