
Commit dcceba4

asottile-sentry authored and andrewshie-sentry committed
ref: fix remaining [index] mypy errors (#87597)
1 parent 95603e7 commit dcceba4

13 files changed, +77 -79 lines changed

pyproject.toml

Lines changed: 0 additions & 1 deletion
@@ -190,7 +190,6 @@ disable_error_code = [
     "assignment",
     "attr-defined",
     "call-overload",
-    "index",
     "misc",
     "operator",
     "override",

src/sentry/api/endpoints/organization_releases.py

Lines changed: 11 additions & 12 deletions
@@ -88,21 +88,20 @@ def _filter_releases_by_query(queryset, organization, query, filter_params):
 
         if search_filter.key.name == RELEASE_ALIAS:
             query_q = Q()
-            raw_value = search_filter.value.raw_value
-            if search_filter.value.is_wildcard():
-                if raw_value.endswith("*") and raw_value.startswith("*"):
-                    query_q = Q(version__contains=raw_value[1:-1])
-                elif raw_value.endswith("*"):
-                    query_q = Q(version__startswith=raw_value[:-1])
-                elif raw_value.startswith("*"):
-                    query_q = Q(version__endswith=raw_value[1:])
+            kind, value_o = search_filter.value.classify_and_format_wildcard()
+            if kind == "infix":
+                query_q = Q(version__contains=value_o)
+            elif kind == "suffix":
+                query_q = Q(version__endswith=value_o)
+            elif kind == "prefix":
+                query_q = Q(version__startswith=value_o)
             elif search_filter.operator == "!=":
-                query_q = ~Q(version=search_filter.value.value)
+                query_q = ~Q(version=value_o)
             elif search_filter.operator == "NOT IN":
-                query_q = ~Q(version__in=raw_value)
+                query_q = ~Q(version__in=value_o)
             elif search_filter.operator == "IN":
-                query_q = Q(version__in=raw_value)
-            elif raw_value == "latest":
+                query_q = Q(version__in=value_o)
+            elif value_o == "latest":
                 latest_releases = get_latest_release(
                     projects=filter_params["project_id"],
                     environments=filter_params.get("environment"),
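
The rewrite replaces the manual startswith/endswith checks with SearchValue.classify_and_format_wildcard(), so value_o is bound once and every later branch works with a plain string. A hypothetical standalone sketch of such a classifier (not the actual Sentry implementation) shows why a pattern like "*foo" is a "suffix" kind and therefore maps to version__endswith:

# Hypothetical sketch of wildcard classification, mirroring the branch names above.
def classify_wildcard(raw_value: str) -> tuple[str, str]:
    if raw_value.startswith("*") and raw_value.endswith("*"):
        return "infix", raw_value[1:-1]   # "*foo*" -> version__contains
    elif raw_value.startswith("*"):
        return "suffix", raw_value[1:]    # "*foo"  -> version__endswith
    elif raw_value.endswith("*"):
        return "prefix", raw_value[:-1]   # "foo*"  -> version__startswith
    return "other", raw_value

assert classify_wildcard("*1.2.*") == ("infix", "1.2.")
assert classify_wildcard("1.2.*") == ("prefix", "1.2.")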

src/sentry/api/paginator.py

Lines changed: 3 additions & 3 deletions
@@ -616,7 +616,7 @@ def __init__(self, intermediaries, desc=False, on_results=None, case_insensitive
         ), "When sorting by a date, it must be the key used on all intermediaries"
 
     def key_from_item(self, item):
-        return self.model_key_map.get(type(item))[0]
+        return self.model_key_map[type(item)][0]
 
     def _prep_value(self, item, key, for_prev):
         """
@@ -664,10 +664,10 @@ def _build_combined_querysets(self, is_prev):
         def _sort_combined_querysets(item):
             sort_keys = []
             sort_keys.append(self.get_item_key(item))
-            if len(self.model_key_map.get(type(item))) > 1:
+            if len(self.model_key_map[type(item)]) > 1:
                 # XXX: This doesn't do anything - it just uses a column name as the sort key. It should be pulling the
                 # value of the other keys out instead.
-                sort_keys.extend(iter(self.model_key_map.get(type(item))[1:]))
+                sort_keys.extend(iter(self.model_key_map[type(item)][1:]))
             sort_keys.append(type(item).__name__)
             return tuple(sort_keys)

src/sentry/integrations/gitlab/issues.py

Lines changed: 4 additions & 3 deletions
@@ -22,6 +22,7 @@ def make_external_key(self, data):
 
     def get_issue_url(self, key: str) -> str:
         match = ISSUE_EXTERNAL_KEY_FORMAT.match(key)
+        assert match is not None
         project, issue_id = match.group(1), match.group(2)
         return "{}/{}/issues/{}".format(self.model.metadata["base_url"], project, issue_id)
 
@@ -34,10 +35,10 @@ def get_projects_and_default(self, group: Group, params: Mapping[str, Any], **kw
         # XXX: In GitLab repositories are called projects but get_repository_choices
         # expects the param to be called 'repo', so we need to rename it here.
         # Django QueryDicts are immutable, so we need to copy it first.
-        params = params.copy()
-        params["repo"] = params.get("project") or defaults.get("project")
+        params_mut = dict(params)
+        params_mut["repo"] = params.get("project") or defaults.get("project")
 
-        default_project, project_choices = self.get_repository_choices(group, params, **kwargs)
+        default_project, project_choices = self.get_repository_choices(group, params_mut, **kwargs)
         return default_project, project_choices
 
     def create_default_repo_choice(self, default_repo):
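
Two narrow patterns here: re.Pattern.match returns re.Match[str] | None, so the new assert narrows the type before .group() is called, and the read-only Mapping parameter is copied into a plain dict before mutation rather than rebinding params. A minimal standalone sketch, using a made-up pattern in place of ISSUE_EXTERNAL_KEY_FORMAT:

import re
from collections.abc import Mapping

EXTERNAL_KEY = re.compile(r"(.+)#(\d+)")  # made-up stand-in for ISSUE_EXTERNAL_KEY_FORMAT

def parse_key(key: str) -> tuple[str, str]:
    match = EXTERNAL_KEY.match(key)
    assert match is not None  # narrows "re.Match[str] | None" to "re.Match[str]"
    return match.group(1), match.group(2)

def with_repo(params: Mapping[str, str]) -> dict[str, str]:
    params_mut = dict(params)  # Mapping is read-only; copy before assigning keys
    params_mut["repo"] = params.get("project", "")
    return params_mut

assert parse_key("group/project#42") == ("group/project", "42")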

src/sentry/search/events/builder/base.py

Lines changed: 1 addition & 1 deletion
@@ -222,7 +222,7 @@ def __init__(
         self.raw_equations = equations
         self.raw_orderby = orderby
         self.query = query
-        self.selected_columns = selected_columns
+        self.selected_columns = selected_columns or []
         self.groupby_columns = groupby_columns
         self.tips: dict[str, set[str]] = {
             "query": set(),

src/sentry/search/events/builder/metrics.py

Lines changed: 4 additions & 4 deletions
@@ -262,9 +262,9 @@ def use_on_demand(self) -> bool:
         return bool(self._on_demand_metric_spec_map)
 
     @cached_property
-    def _on_demand_metric_spec_map(self) -> dict[str, OnDemandMetricSpec] | None:
+    def _on_demand_metric_spec_map(self) -> dict[str, OnDemandMetricSpec]:
         if not self.builder_config.on_demand_metrics_enabled:
-            return None
+            return {}
 
         spec_map = {}
         for col in self.selected_columns:
@@ -1970,9 +1970,9 @@ def translated_groupby(self) -> list[str]:
         return sorted(translated)
 
     @cached_property
-    def _on_demand_metric_spec_map(self) -> dict[str, OnDemandMetricSpec] | None:
+    def _on_demand_metric_spec_map(self) -> dict[str, OnDemandMetricSpec]:
         if not self.builder_config.on_demand_metrics_enabled:
-            return None
+            return {}
 
         return {
             col: self._get_on_demand_metric_spec(col)
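
Returning an empty dict instead of None keeps the cached property non-Optional, so use_on_demand can keep calling bool() on it and call sites can index into the map without a None guard. A small sketch of the pattern, with an assumed enabled flag standing in for builder_config.on_demand_metrics_enabled:

from functools import cached_property

class SpecMap:
    def __init__(self, enabled: bool, columns: list[str]) -> None:
        self.enabled = enabled
        self.columns = columns

    @cached_property
    def spec_map(self) -> dict[str, str]:
        # An empty dict is falsy, so "disabled" and "no specs" read the same to
        # callers, and spec_map[col] type-checks without an Optional guard.
        if not self.enabled:
            return {}
        return {col: f"spec:{col}" for col in self.columns}

    @property
    def use_on_demand(self) -> bool:
        return bool(self.spec_map)

assert not SpecMap(enabled=False, columns=["a"]).use_on_demand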

src/sentry/search/snuba/executors.py

Lines changed: 7 additions & 12 deletions
@@ -1569,20 +1569,15 @@ def get_first_release_condition(
                 organization=projects[0].organization_id, version__in=versions
             ).values_list("version", "id")
         }
-
-        for version in versions:
-            if version not in releases:
-                # TODO: This is mostly around for legacy reasons - we should probably just
-                # raise a validation here an inform the user that they passed an invalid
-                # release
-                releases[None] = -1
-                # We only need to find the first non-existent release here
-                break
+        # TODO: This is mostly around for legacy reasons - we should probably just
+        # raise a validation here an inform the user that they passed an invalid
+        # release
+        condition_values = list(releases.values())
+        if any(version not in releases for version in versions):
+            condition_values.append(-1)
 
         return Condition(
-            Column("group_first_release", self.entities["attrs"]),
-            Op.IN,
-            list(releases.values()),
+            Column("group_first_release", self.entities["attrs"]), Op.IN, condition_values
         )
 
     ISSUE_FIELD_NAME = "group_id"
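
The old loop stored a sentinel under releases[None], which widened the dict's key type and tripped the stricter checks; building condition_values as a separate list keeps releases homogeneous while still appending -1 when any requested version is unknown. A tiny sketch of the equivalent logic with made-up data:

# Made-up data standing in for the Release queryset lookup.
releases: dict[str, int] = {"1.0.0": 10, "1.1.0": 11}
versions = ["1.0.0", "2.0.0"]

condition_values = list(releases.values())
if any(version not in releases for version in versions):
    condition_values.append(-1)  # unknown release -> match nothing real

assert condition_values == [10, 11, -1]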

src/sentry/snuba/metrics/query.py

Lines changed: 2 additions & 2 deletions
@@ -40,7 +40,7 @@ class MetricField:
     op: MetricOperationType | None
     metric_mri: str
     params: dict[str, None | str | int | float | Sequence[tuple[str | int, ...]]] | None = None
-    alias: str | None = None
+    alias: str = ""
 
     def __post_init__(self) -> None:
         # Validate that it is a valid MRI format
@@ -87,7 +87,7 @@ class MetricActionByField:
 
 @dataclass(frozen=True)
 class MetricGroupByField(MetricActionByField):
-    alias: str | None = None
+    alias: str = ""
 
     def __post_init__(self) -> None:
         if not self.alias:
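
Defaulting alias to "" instead of None keeps the existing falsy checks working while making the attribute a plain str, which is what lets query_builder.py below tighten its tuple keys from str | None to str. A minimal sketch of why the two defaults behave the same at runtime:

from dataclasses import dataclass

@dataclass(frozen=True)
class Field:
    metric_mri: str
    alias: str = ""  # "" is falsy, so "if not self.alias" takes the same branch as with None

f = Field("d:transactions/duration@millisecond")
assert not f.alias               # same truthiness as the old None default
assert isinstance(f.alias, str)  # but the static type is now plain str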

src/sentry/snuba/metrics/query_builder.py

Lines changed: 13 additions & 11 deletions
@@ -1099,10 +1099,10 @@ def __generate_time_groupby_column_for_discover_queries(interval: int) -> Functi
     def __update_query_dicts_with_component_entities(
         self,
         component_entities: dict[MetricEntity, Sequence[str]],
-        metric_mri_to_obj_dict: dict[tuple[str | None, str, str | None], MetricExpressionBase],
-        fields_in_entities: dict[MetricEntity, list[tuple[str | None, str, str | None]]],
+        metric_mri_to_obj_dict: dict[tuple[str | None, str, str], MetricExpressionBase],
+        fields_in_entities: dict[MetricEntity, list[tuple[str | None, str, str]]],
         parent_alias,
-    ) -> dict[tuple[str | None, str, str | None], MetricExpressionBase]:
+    ) -> dict[tuple[str | None, str, str], MetricExpressionBase]:
         # At this point in time, we are only supporting raw metrics in the metrics attribute of
         # any instance of DerivedMetric, and so in this case the op will always be None
         # ToDo(ahmed): In future PR, we might want to allow for dependency metrics to also have an
@@ -1127,11 +1127,11 @@ def __update_query_dicts_with_component_entities(
         return metric_mri_to_obj_dict
 
     def get_snuba_queries(self):
-        metric_mri_to_obj_dict: dict[tuple[str | None, str, str | None], MetricExpressionBase] = {}
-        fields_in_entities: dict[MetricEntity, list[tuple[str | None, str, str | None]]] = {}
+        metric_mri_to_obj_dict: dict[tuple[str | None, str, str], MetricExpressionBase] = {}
+        fields_in_entities: dict[MetricEntity, list[tuple[str | None, str, str]]] = {}
 
-        for field in self._metrics_query.select:
-            metric_field_obj = metric_object_factory(field.op, field.metric_mri)
+        for select_field in self._metrics_query.select:
+            metric_field_obj = metric_object_factory(select_field.op, select_field.metric_mri)
             # `get_entity` is called the first, to fetch the entities of constituent metrics,
             # and validate especially in the case of SingularEntityDerivedMetric that it is
             # actually composed of metrics that belong to the same entity
@@ -1155,7 +1155,7 @@ def get_snuba_queries(self):
                     component_entities=component_entities,
                     metric_mri_to_obj_dict=metric_mri_to_obj_dict,
                     fields_in_entities=fields_in_entities,
-                    parent_alias=field.alias,
+                    parent_alias=select_field.alias,
                 )
                 continue
             elif isinstance(component_entities, str):
@@ -1172,9 +1172,11 @@ def get_snuba_queries(self):
             if entity not in self._implemented_datasets:
                 raise NotImplementedError(f"Dataset not yet implemented: {entity}")
 
-            metric_mri_to_obj_dict[(field.op, field.metric_mri, field.alias)] = metric_field_obj
+            metric_mri_to_obj_dict[
+                (select_field.op, select_field.metric_mri, select_field.alias)
+            ] = metric_field_obj
             fields_in_entities.setdefault(entity, []).append(
-                (field.op, field.metric_mri, field.alias)
+                (select_field.op, select_field.metric_mri, select_field.alias)
             )
 
         where = self._build_where()
@@ -1252,7 +1254,7 @@ def __init__(
         self,
         organization_id: int,
         metrics_query: DeprecatingMetricsQuery,
-        fields_in_entities: dict,
+        fields_in_entities: dict[MetricEntity, list[tuple[str | None, str, str]]],
         intervals: list[datetime],
         results,
         use_case_id: UseCaseID,

src/sentry/testutils/cases.py

Lines changed: 9 additions & 7 deletions
@@ -43,6 +43,7 @@
 from rest_framework.response import Response
 from rest_framework.test import APITestCase as BaseAPITestCase
 from rest_framework.test import APITransactionTestCase as BaseAPITransactionTestCase
+from sentry_kafka_schemas.schema_types.snuba_spans_v1 import SpanEvent
 from sentry_kafka_schemas.schema_types.uptime_results_v1 import (
     CHECKSTATUS_FAILURE,
     CHECKSTATUSREASONTYPE_TIMEOUT,
@@ -279,6 +280,7 @@ def make_request(
         request.META["SERVER_NAME"] = "testserver"
         request.META["SERVER_PORT"] = 80
         if secure_scheme:
+            assert settings.SECURE_PROXY_SSL_HEADER is not None
             secure_header = settings.SECURE_PROXY_SSL_HEADER
             request.META[secure_header[0]] = secure_header[1]
 
@@ -1366,7 +1368,7 @@ def store_segment(
         transaction: str | None = None,
         duration: int = 10,
         exclusive_time: int = 5,
-        tags: Mapping[str, Any] | None = None,
+        tags: dict[str, str] | None = None,
         measurements: Mapping[str, int | float] | None = None,
         timestamp: datetime | None = None,
         sdk_name: str | None = None,
@@ -1380,7 +1382,7 @@ def store_segment(
         if timestamp is None:
             timestamp = timezone.now()
 
-        payload = {
+        payload: SpanEvent = {
             "project_id": project_id,
             "organization_id": organization_id,
             "span_id": span_id,
@@ -1389,7 +1391,7 @@ def store_segment(
             "start_timestamp_precise": timestamp.timestamp(),
             "end_timestamp_precise": timestamp.timestamp() + duration / 1000,
             "exclusive_time_ms": int(exclusive_time),
-            "description": transaction,
+            "description": transaction or "",
             "is_segment": True,
             "received": timezone.now().timestamp(),
             "start_timestamp_ms": int(timestamp.timestamp() * 1000),
@@ -1411,7 +1413,7 @@ def store_segment(
         if parent_span_id:
             payload["parent_span_id"] = parent_span_id
         if sdk_name is not None:
-            payload["sentry_tags"]["sdk.name"] = sdk_name
+            payload["sentry_tags"]["sdk.name"] = sdk_name  # type: ignore[typeddict-unknown-key]  # needs extra_items support
         if op is not None:
             payload["sentry_tags"]["op"] = op
         if status is not None:
@@ -1431,7 +1433,7 @@ def store_indexed_span(
         op: str | None = None,
         duration: int = 10,
         exclusive_time: int = 5,
-        tags: Mapping[str, Any] | None = None,
+        tags: dict[str, str] | None = None,
         measurements: Mapping[str, int | float] | None = None,
         timestamp: datetime | None = None,
         store_only_summary: bool = False,
@@ -1445,7 +1447,7 @@ def store_indexed_span(
         if timestamp is None:
             timestamp = timezone.now()
 
-        payload = {
+        payload: SpanEvent = {
             "project_id": project_id,
             "organization_id": organization_id,
             "span_id": span_id,
@@ -1479,7 +1481,7 @@ def store_indexed_span(
         if parent_span_id:
             payload["parent_span_id"] = parent_span_id
         if category is not None:
-            payload["sentry_tags"]["category"] = category
+            payload["sentry_tags"]["category"] = category  # type: ignore[typeddict-unknown-key]  # needs extra_items support
 
         # We want to give the caller the possibility to store only a summary since the database does not deduplicate
         # on the span_id which makes the assumptions of a unique span_id in the database invalid.
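
Annotating the payloads as the SpanEvent TypedDict from sentry_kafka_schemas lets mypy check the literal's keys and value types, at the cost of targeted ignores for keys the schema does not declare yet (hence the "needs extra_items support" notes). A generic sketch of that trade-off with a made-up TypedDict:

from typing import TypedDict

class Tags(TypedDict, total=False):
    op: str
    status: str

payload_tags: Tags = {"op": "http.server"}
payload_tags["status"] = "ok"
# A key the TypedDict does not declare is rejected with [typeddict-unknown-key],
# so call sites either extend the schema or add a targeted type: ignore.
payload_tags["sdk.name"] = "sentry.python"  # type: ignore[typeddict-unknown-key]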
