Skip to content

Commit 7b2967d

Browse files
committed
feat(preprod): Hook size analysis detector to diff
1 parent affc2c2 commit 7b2967d

File tree

2 files changed

+187
-29
lines changed

2 files changed

+187
-29
lines changed

src/sentry/preprod/size_analysis/tasks.py

Lines changed: 41 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -8,14 +8,18 @@
88
from django.utils import timezone
99

1010
from sentry import features
11-
from sentry.issues.producer import PayloadType, produce_occurrence_to_kafka
1211
from sentry.models.files.file import File
1312
from sentry.preprod.models import (
1413
PreprodArtifact,
1514
PreprodArtifactSizeComparison,
1615
PreprodArtifactSizeMetrics,
1716
)
1817
from sentry.preprod.size_analysis.compare import compare_size_analysis
18+
from sentry.preprod.size_analysis.grouptype import (
19+
PreprodSizeAnalysisGroupType,
20+
SizeAnalysisDataPacket,
21+
SizeAnalysisValue,
22+
)
1923
from sentry.preprod.size_analysis.models import ComparisonResults, SizeAnalysisResults
2024
from sentry.preprod.size_analysis.utils import build_size_metrics_map, can_compare_size_metrics
2125
from sentry.preprod.vcs.status_checks.size.tasks import create_preprod_status_check_task
@@ -24,8 +28,8 @@
2428
from sentry.taskworker.namespaces import preprod_tasks
2529
from sentry.utils import metrics
2630
from sentry.utils.json import dumps_htmlsafe
27-
28-
from .issues import diff_to_occurrence
31+
from sentry.workflow_engine.models import DataPacket, Detector
32+
from sentry.workflow_engine.processors.detector import process_detectors
2933

3034
logger = logging.getLogger(__name__)
3135

@@ -551,37 +555,45 @@ def _maybe_emit_issues(
551555
)
552556
return
553557

554-
# TODO(EME-80): Make threshold configurable:
555-
arbitrary_threshold = 100 * 1024
556-
diff = comparison_results.size_metric_diff_item
557-
download_delta = diff.head_download_size - diff.base_download_size
558-
install_delta = diff.head_install_size - diff.base_install_size
559-
560-
issue_count = 0
561-
562-
if download_delta >= arbitrary_threshold:
563-
occurrence, event_data = diff_to_occurrence("download", diff, head_metric, base_metric)
564-
produce_occurrence_to_kafka(
565-
payload_type=PayloadType.OCCURRENCE,
566-
occurrence=occurrence,
567-
event_data=event_data,
558+
detectors = list(
559+
Detector.objects.filter(
560+
project_id=project_id,
561+
type=PreprodSizeAnalysisGroupType.slug,
562+
enabled=True,
568563
)
569-
issue_count += 1
570-
571-
if install_delta >= arbitrary_threshold:
572-
occurrence, event_data = diff_to_occurrence("install", diff, head_metric, base_metric)
573-
produce_occurrence_to_kafka(
574-
payload_type=PayloadType.OCCURRENCE,
575-
occurrence=occurrence,
576-
event_data=event_data,
564+
)
565+
if not detectors:
566+
logger.info(
567+
"preprod.size_analysis.no_detectors",
568+
extra={"project_id": project_id},
577569
)
578-
issue_count += 1
570+
return
579571

572+
diff = comparison_results.size_metric_diff_item
573+
size_data: SizeAnalysisValue = {
574+
"head_install_size_bytes": diff.head_install_size,
575+
"head_download_size_bytes": diff.head_download_size,
576+
"base_install_size_bytes": diff.base_install_size,
577+
"base_download_size_bytes": diff.base_download_size,
578+
}
579+
580+
data_packet: SizeAnalysisDataPacket = DataPacket(
581+
source_id=f"preprod-size-analysis:{project_id}",
582+
packet=size_data,
583+
)
584+
585+
logger.info(
586+
"preprod.size_analysis.process_detectors.starting",
587+
extra={
588+
"project_id": project_id,
589+
"detector_count": len(detectors),
590+
},
591+
)
592+
results = process_detectors(data_packet, detectors)
580593
logger.info(
581-
"preprod.size_analysis.compare.issues",
594+
"preprod.size_analysis.process_detectors.completed",
582595
extra={
583596
"project_id": project_id,
584-
"organization_id": organization_id,
585-
"issue_count": issue_count,
597+
"detector_count": len(results),
586598
},
587599
)
Lines changed: 146 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,146 @@
1+
from unittest.mock import patch
2+
3+
from sentry.preprod.models import PreprodArtifactSizeMetrics
4+
from sentry.preprod.size_analysis.grouptype import PreprodSizeAnalysisGroupType
5+
from sentry.preprod.size_analysis.models import ComparisonResults, SizeMetricDiffItem
6+
from sentry.preprod.size_analysis.tasks import maybe_emit_issues
7+
from sentry.testutils.cases import TestCase
8+
from sentry.workflow_engine.models.data_condition import Condition
9+
from sentry.workflow_engine.types import DetectorPriorityLevel
10+
11+
12+
class MaybeEmitIssuesTest(TestCase):
    """Tests for the maybe_emit_issues function.

    NOTE(review): this module imports ``maybe_emit_issues`` while the diffed
    task module defines ``_maybe_emit_issues`` — confirm the exported name.
    """

    def _create_comparison_results(
        self,
        head_install_size: int = 5000000,
        head_download_size: int = 2000000,
        base_install_size: int = 4000000,
        base_download_size: int = 1500000,
    ) -> ComparisonResults:
        """Build a ComparisonResults holding a single MAIN_ARTIFACT size diff item.

        Defaults produce a +1,000,000-byte install delta and a +500,000-byte
        download delta, so the default detector threshold tests have a known
        delta to evaluate.
        """
        return ComparisonResults(
            diff_items=[],
            insight_diff_items=[],
            size_metric_diff_item=SizeMetricDiffItem(
                metrics_artifact_type=PreprodArtifactSizeMetrics.MetricsArtifactType.MAIN_ARTIFACT,
                identifier=None,
                head_install_size=head_install_size,
                head_download_size=head_download_size,
                base_install_size=base_install_size,
                base_download_size=base_download_size,
            ),
            skipped_diff_item_comparison=False,
        )

    def _create_size_metrics(self):
        """Create head/base preprod artifacts with COMPLETED size metrics.

        Shared fixture for every test in this class (previously duplicated in
        each test method). Returns a ``(head_metric, base_metric)`` tuple whose
        sizes match the defaults of ``_create_comparison_results``.
        """
        head_artifact = self.create_preprod_artifact(project=self.project, app_id="com.example.app")
        base_artifact = self.create_preprod_artifact(project=self.project, app_id="com.example.app")

        head_metric = self.create_preprod_artifact_size_metrics(
            head_artifact,
            metrics_type=PreprodArtifactSizeMetrics.MetricsArtifactType.MAIN_ARTIFACT,
            identifier=None,
            max_install_size=5000000,
            max_download_size=2000000,
            state=PreprodArtifactSizeMetrics.SizeAnalysisState.COMPLETED,
        )
        base_metric = self.create_preprod_artifact_size_metrics(
            base_artifact,
            metrics_type=PreprodArtifactSizeMetrics.MetricsArtifactType.MAIN_ARTIFACT,
            identifier=None,
            max_install_size=4000000,
            max_download_size=1500000,
            state=PreprodArtifactSizeMetrics.SizeAnalysisState.COMPLETED,
        )
        return head_metric, base_metric

    def test_maybe_emit_issues_triggers_detector_evaluation(self):
        """With the feature enabled and an enabled detector whose condition
        matches the size delta, exactly one occurrence is produced."""
        head_metric, base_metric = self._create_size_metrics()

        condition_group = self.create_data_condition_group(
            organization=self.project.organization,
        )
        # Fires when the measured value exceeds 1,000,000 — the default
        # fixture's install delta is exactly at/above this boundary.
        self.create_data_condition(
            condition_group=condition_group,
            type=Condition.GREATER,
            comparison=1000000,
            condition_result=DetectorPriorityLevel.HIGH,
        )
        self.create_detector(
            name="test-detector",
            project=self.project,
            type=PreprodSizeAnalysisGroupType.slug,
            config={"threshold_type": "absolute_threshold", "measurement": "install_size"},
            workflow_condition_group=condition_group,
        )

        comparison_results = self._create_comparison_results()

        with self.feature("organizations:preprod-issues"):
            with patch(
                "sentry.workflow_engine.processors.detector.produce_occurrence_to_kafka"
            ) as mock_produce:
                maybe_emit_issues(comparison_results, head_metric, base_metric)

        assert mock_produce.call_count == 1

    def test_maybe_emit_issues_no_detectors(self):
        """With the feature enabled but no detectors configured for the
        project, nothing is produced."""
        head_metric, base_metric = self._create_size_metrics()

        comparison_results = self._create_comparison_results()

        with self.feature("organizations:preprod-issues"):
            with patch(
                "sentry.workflow_engine.processors.detector.produce_occurrence_to_kafka"
            ) as mock_produce:
                maybe_emit_issues(comparison_results, head_metric, base_metric)

        assert mock_produce.call_count == 0

    def test_maybe_emit_issues_feature_flag_disabled(self):
        """Without the organizations:preprod-issues feature flag, the function
        returns early and nothing is produced."""
        head_metric, base_metric = self._create_size_metrics()

        comparison_results = self._create_comparison_results()

        with patch(
            "sentry.workflow_engine.processors.detector.produce_occurrence_to_kafka"
        ) as mock_produce:
            maybe_emit_issues(comparison_results, head_metric, base_metric)

        assert mock_produce.call_count == 0

0 commit comments

Comments
 (0)