
Commit 940c5b5

feat(preprod): support multiple apps in the status check (#98846)
Updates our status check logic to support multiple apps for the same commit, say if the user is uploading from a monorepo or producing multiple apps like an iOS and macOS variant.

Screenshot: https://github.com/user-attachments/assets/d17dfa85-1e92-4594-84c3-4ed6e50136dc
1 parent d9da406 commit 940c5b5
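
For a monorepo commit that produces, say, an iOS and a macOS artifact, each upload creates its own PreprodArtifact row, but all of them share the commit's CommitComparison, so a single status check now has to summarize the whole set. A simplified, standalone sketch of the rollup rule (illustrative only; the real _compute_overall_status helper added below also folds in size-analysis state):

    def overall_status(app_states: list[str]) -> str:
        # Simplified precedence: any failed app fails the check, any app still
        # uploading keeps it in progress, otherwise the check succeeds.
        if "failed" in app_states:
            return "failure"
        if any(state in ("uploading", "uploaded") for state in app_states):
            return "in_progress"
        return "success"

    # e.g. an iOS build that finished while the macOS build is still uploading:
    print(overall_status(["processed", "uploading"]))  # -> "in_progress"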

File tree

9 files changed: +1414 -212 lines


bin/preprod/trigger_size_status_check

Lines changed: 420 additions & 0 deletions
Large diffs are not rendered by default.

src/sentry/preprod/models.py

Lines changed: 18 additions & 0 deletions
@@ -130,6 +130,24 @@ def as_choices(cls):
     # An identifier for the main binary
     main_binary_identifier = models.CharField(max_length=255, db_index=True, null=True)

+    def get_sibling_artifacts_for_commit(self) -> models.QuerySet["PreprodArtifact"]:
+        """
+        Get all artifacts for the same commit comparison (monorepo scenario).
+
+        Note: Always includes the calling artifact itself along with any siblings.
+        Results are filtered by the current artifact's organization for security.
+
+        Returns:
+            QuerySet of PreprodArtifact objects, ordered by app_id for stable results
+        """
+        if not self.commit_comparison:
+            return PreprodArtifact.objects.none()
+
+        return PreprodArtifact.objects.filter(
+            commit_comparison=self.commit_comparison,
+            project__organization_id=self.project.organization_id,
+        ).order_by("app_id")
+
     class Meta:
         app_label = "preprod"
         db_table = "sentry_preprodartifact"
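
A sketch of how the new helper might be consumed, mirroring the prefetch that create_preprod_status_check_task performs later in this commit (the function name size_metrics_by_artifact is hypothetical; the models and query calls are the ones shown in the diffs below):

    from sentry.preprod.models import PreprodArtifact, PreprodArtifactSizeMetrics

    def size_metrics_by_artifact(
        artifact: PreprodArtifact,
    ) -> dict[int, list[PreprodArtifactSizeMetrics]]:
        # Fetch every sibling artifact for the commit, then group its size
        # metrics rows by artifact id, as the status-check task does.
        siblings = list(artifact.get_sibling_artifacts_for_commit())
        metrics_map: dict[int, list[PreprodArtifactSizeMetrics]] = {}
        for metrics in PreprodArtifactSizeMetrics.objects.filter(
            preprod_artifact_id__in=[sibling.id for sibling in siblings]
        ).select_related("preprod_artifact"):
            metrics_map.setdefault(metrics.preprod_artifact_id, []).append(metrics)
        return metrics_map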

src/sentry/preprod/tasks.py

Lines changed: 78 additions & 30 deletions
@@ -48,12 +48,12 @@
     ),
 )
 def assemble_preprod_artifact(
-    org_id,
-    project_id,
-    checksum,
-    chunks,
-    artifact_id,
-    **kwargs,
+    org_id: int,
+    project_id: int,
+    checksum: Any,
+    chunks: Any,
+    artifact_id: int,
+    **kwargs: Any,
 ) -> None:
     """
     Creates a preprod artifact from uploaded chunks.
@@ -241,6 +241,15 @@ def create_preprod_artifact(
         extras=extras,
     )

+    # TODO(preprod): add gating to only create if has quota
+    PreprodArtifactSizeMetrics.objects.get_or_create(
+        preprod_artifact=preprod_artifact,
+        metrics_artifact_type=PreprodArtifactSizeMetrics.MetricsArtifactType.MAIN_ARTIFACT,
+        defaults={
+            "state": PreprodArtifactSizeMetrics.SizeAnalysisState.PENDING,
+        },
+    )
+
     logger.info(
         "Created preprod artifact row",
         extra={
@@ -329,8 +338,9 @@ def _assemble_preprod_artifact_file(


 def _assemble_preprod_artifact_size_analysis(
-    assemble_result: AssembleResult, project, artifact_id, org_id
+    assemble_result: AssembleResult, project, artifact_id: int, org_id: int
 ):
+    preprod_artifact = None
     try:
         preprod_artifact = PreprodArtifact.objects.get(
             project=project,
@@ -357,35 +367,73 @@ def _assemble_preprod_artifact_size_analysis(
             pass  # Ignore cleanup errors
         raise Exception(f"PreprodArtifact with id {artifact_id} does not exist")

-    size_analysis_results = SizeAnalysisResults.parse_raw(assemble_result.bundle_temp_file.read())
+    try:
+        size_analysis_results = SizeAnalysisResults.parse_raw(
+            assemble_result.bundle_temp_file.read()
+        )
+
+        with transaction.atomic(router.db_for_write(PreprodArtifactSizeMetrics)):
+            # TODO(preprod): parse this from the treemap json and handle other artifact types
+            size_metrics, created = PreprodArtifactSizeMetrics.objects.update_or_create(
+                preprod_artifact=preprod_artifact,
+                metrics_artifact_type=PreprodArtifactSizeMetrics.MetricsArtifactType.MAIN_ARTIFACT,
+                defaults={
+                    "analysis_file_id": assemble_result.bundle.id,
+                    "min_install_size": None,  # No min value at this time
+                    "max_install_size": size_analysis_results.install_size,
+                    "min_download_size": None,  # No min value at this time
+                    "max_download_size": size_analysis_results.download_size,
+                    "state": PreprodArtifactSizeMetrics.SizeAnalysisState.COMPLETED,
+                },
+            )

-    # Update size metrics in its own transaction
-    with transaction.atomic(router.db_for_write(PreprodArtifactSizeMetrics)):
-        size_metrics, created = PreprodArtifactSizeMetrics.objects.update_or_create(
-            preprod_artifact=preprod_artifact,
-            defaults={
+        logger.info(
+            "Created or updated preprod artifact size metrics with analysis file",
+            extra={
+                "preprod_artifact_id": preprod_artifact.id,
+                "size_metrics_id": size_metrics.id,
                 "analysis_file_id": assemble_result.bundle.id,
-                "metrics_artifact_type": PreprodArtifactSizeMetrics.MetricsArtifactType.MAIN_ARTIFACT,  # TODO: parse this from the treemap json
-                "min_install_size": None,  # No min value at this time
-                "max_install_size": size_analysis_results.install_size,
-                "min_download_size": None,  # No min value at this time
-                "max_download_size": size_analysis_results.download_size,
-                "state": PreprodArtifactSizeMetrics.SizeAnalysisState.COMPLETED,
+                "was_created": created,
+                "project_id": project.id,
+                "organization_id": org_id,
             },
         )

-        logger.info(
-            "Created or updated preprod artifact size metrics with analysis file",
-            extra={
-                "preprod_artifact_id": preprod_artifact.id,
-                "size_metrics_id": size_metrics.id,
-                "analysis_file_id": assemble_result.bundle.id,
-                "was_created": created,
-                "project_id": project.id,
-                "organization_id": org_id,
-            },
-        )
+    except Exception as e:
+        logger.exception(
+            "Failed to process size analysis results",
+            extra={
+                "preprod_artifact_id": artifact_id,
+                "project_id": project.id,
+                "organization_id": org_id,
+            },
+        )
+
+        with transaction.atomic(router.db_for_write(PreprodArtifactSizeMetrics)):
+            try:
+                PreprodArtifactSizeMetrics.objects.update_or_create(
+                    preprod_artifact=preprod_artifact,
+                    metrics_artifact_type=PreprodArtifactSizeMetrics.MetricsArtifactType.MAIN_ARTIFACT,
+                    defaults={
+                        "state": PreprodArtifactSizeMetrics.SizeAnalysisState.FAILED,
+                        "error_code": PreprodArtifactSizeMetrics.ErrorCode.PROCESSING_ERROR,
+                        "error_message": str(e),
+                    },
+                )
+            except Exception:
+                logger.exception(
+                    "Failed to update preprod artifact size metrics",
+                    extra={
+                        "preprod_artifact_id": artifact_id,
+                        "project_id": project.id,
+                        "organization_id": org_id,
+                    },
+                )
+
+        # Re-raise to trigger further error handling if needed
+        raise

+    # Always trigger status check update (success or failure)
     create_preprod_status_check_task.apply_async(
         kwargs={
             "preprod_artifact_id": artifact_id,

src/sentry/preprod/vcs/status_checks/tasks.py

Lines changed: 54 additions & 15 deletions
@@ -21,7 +21,7 @@
 from sentry.models.commitcomparison import CommitComparison
 from sentry.models.project import Project
 from sentry.models.repository import Repository
-from sentry.preprod.models import PreprodArtifact
+from sentry.preprod.models import PreprodArtifact, PreprodArtifactSizeMetrics
 from sentry.preprod.url_utils import get_preprod_artifact_url
 from sentry.preprod.vcs.status_checks.templates import format_status_check_messages
 from sentry.silo.base import SiloMode
@@ -58,20 +58,6 @@ def create_preprod_status_check_task(preprod_artifact_id: int) -> None:
         extra={"artifact_id": preprod_artifact.id},
     )

-    match preprod_artifact.state:
-        case PreprodArtifact.ArtifactState.UPLOADING | PreprodArtifact.ArtifactState.UPLOADED:
-            status = StatusCheckStatus.IN_PROGRESS
-        case PreprodArtifact.ArtifactState.FAILED:
-            status = StatusCheckStatus.FAILURE
-        case PreprodArtifact.ArtifactState.PROCESSED:
-            status = StatusCheckStatus.SUCCESS
-        case _:
-            raise ValueError(f"Unhandled artifact state: {preprod_artifact.state}")
-
-    title, subtitle, summary = format_status_check_messages(preprod_artifact)
-
-    target_url = get_preprod_artifact_url(preprod_artifact)
-
     if not preprod_artifact.commit_comparison:
         logger.info(
             "preprod.status_checks.create.no_commit_comparison",
@@ -91,6 +77,9 @@ def create_preprod_status_check_task(preprod_artifact_id: int) -> None:
         )
         return

+    # Get all artifacts for this commit across all projects in the organization
+    all_artifacts = list(preprod_artifact.get_sibling_artifacts_for_commit())
+
     client, repository = _get_status_check_client(preprod_artifact.project, commit_comparison)
     if not client or not repository:
         # logging handled in _get_status_check_client. for now we can be lax about users potentially
@@ -110,6 +99,24 @@ def create_preprod_status_check_task(preprod_artifact_id: int) -> None:
         )
         return

+    size_metrics_map: dict[int, list[PreprodArtifactSizeMetrics]] = {}
+    if all_artifacts:
+        artifact_ids = [artifact.id for artifact in all_artifacts]
+        size_metrics_qs = PreprodArtifactSizeMetrics.objects.filter(
+            preprod_artifact_id__in=artifact_ids,
+        ).select_related("preprod_artifact")
+
+        for metrics in size_metrics_qs:
+            if metrics.preprod_artifact_id not in size_metrics_map:
+                size_metrics_map[metrics.preprod_artifact_id] = []
+            size_metrics_map[metrics.preprod_artifact_id].append(metrics)
+
+    status = _compute_overall_status(all_artifacts, size_metrics_map)
+
+    title, subtitle, summary = format_status_check_messages(all_artifacts, size_metrics_map, status)
+
+    target_url = get_preprod_artifact_url(preprod_artifact)
+
     check_id = provider.create_status_check(
         repo=commit_comparison.head_repo_name,
         sha=commit_comparison.head_sha,
@@ -138,6 +145,38 @@ def create_preprod_status_check_task(preprod_artifact_id: int) -> None:
     )


+def _compute_overall_status(
+    artifacts: list[PreprodArtifact], size_metrics_map: dict[int, list[PreprodArtifactSizeMetrics]]
+) -> StatusCheckStatus:
+    if not artifacts:
+        raise ValueError("Cannot compute status for empty artifact list")
+
+    states = {artifact.state for artifact in artifacts}
+
+    if PreprodArtifact.ArtifactState.FAILED in states:
+        return StatusCheckStatus.FAILURE
+    elif (
+        PreprodArtifact.ArtifactState.UPLOADING in states
+        or PreprodArtifact.ArtifactState.UPLOADED in states
+    ):
+        return StatusCheckStatus.IN_PROGRESS
+    elif all(state == PreprodArtifact.ArtifactState.PROCESSED for state in states):
+        # All artifacts are processed, but we need to check if size analysis (if present) is complete
+        for artifact in artifacts:
+            size_metrics_list = size_metrics_map.get(artifact.id, [])
+            if size_metrics_list:
+                for size_metrics in size_metrics_list:
+                    if size_metrics.state == PreprodArtifactSizeMetrics.SizeAnalysisState.FAILED:
+                        return StatusCheckStatus.FAILURE
+                    elif (
+                        size_metrics.state != PreprodArtifactSizeMetrics.SizeAnalysisState.COMPLETED
+                    ):
+                        return StatusCheckStatus.IN_PROGRESS
+        return StatusCheckStatus.SUCCESS
+    else:
+        return StatusCheckStatus.IN_PROGRESS
+
+
 def _get_status_check_client(
     project: Project, commit_comparison: CommitComparison
 ) -> tuple[StatusCheckClient, Repository] | tuple[None, None]:
