This repository was archived by the owner on May 5, 2025. It is now read-only.

Commit 87f463e

feat: new TA processor implementation (#1117)
1 parent 246f7d1 commit 87f463e

11 files changed: +775 -1 lines changed

conftest.py (2 additions, 0 deletions)

@@ -204,6 +204,8 @@ def mock_configuration(mocker):
                 "hash_key": "88f572f4726e4971827415efa8867978",
                 "secret_access_key": "codecov-default-secret",
                 "verify_ssl": False,
+                "host": "minio",
+                "port": 9000,
             },
             "smtp": {
                 "host": "mailhog",

docker-compose.yml (14 additions, 0 deletions)

@@ -7,6 +7,7 @@ services:
       - postgres
       - redis
       - timescale
+      - minio
     volumes:
       - ./:/app/apps/worker
       - ./docker/test_codecov_config.yml:/config/codecov.yml
@@ -41,6 +42,19 @@ services:
     volumes:
       - ./docker/init_db.sql:/docker-entrypoint-initdb.d/init_db.sql

+  minio:
+    image: minio/minio:latest
+    command: server /export
+    ports:
+      - "${MINIO_PORT:-9000}:9000"
+    environment:
+      - MINIO_ACCESS_KEY=codecov-default-key
+      - MINIO_SECRET_KEY=codecov-default-secret
+    volumes:
+      - type: tmpfs
+        target: /export
+        tmpfs:
+          size: 256M
   redis:
     image: redis:6-alpine

docker/test_codecov_config.yml (2 additions, 0 deletions)

@@ -18,6 +18,8 @@ services:
     access_key_id: codecov-default-key
     secret_access_key: codecov-default-secret
     verify_ssl: false
+    port: 9000
+    host: minio
   smtp:
     host: mailhog
     port: 1025
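
The three config changes above point the test environment's object storage at the new minio service by host and port. Below is a minimal sketch of how such settings could be turned into a client connection; the get_minio_client helper and the direct use of the minio client are illustrative assumptions (the worker's real wiring lives in shared.storage), and only config keys that appear in this diff are referenced.

# Illustrative sketch only: the worker's actual storage wiring lives in
# shared.storage; this just shows how the services.minio keys added above
# (host, port) combine with the existing ones to reach the compose service.
from minio import Minio
from shared.config import get_config


def get_minio_client() -> Minio:  # hypothetical helper, not part of this commit
    host = get_config("services", "minio", "host", default="minio")
    port = get_config("services", "minio", "port", default=9000)
    return Minio(
        f"{host}:{port}",
        access_key=get_config("services", "minio", "access_key_id"),
        secret_key=get_config("services", "minio", "secret_access_key"),
        secure=get_config("services", "minio", "verify_ssl", default=False),
    )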
New file: services/test_analytics/ta_processing.py (104 additions, 0 deletions)

from __future__ import annotations

from dataclasses import dataclass
from typing import Any

import sentry_sdk
import test_results_parser
from shared.config import get_config
from shared.django_apps.core.models import Commit, Repository
from shared.django_apps.reports.models import ReportSession, UploadError

from services.archive import ArchiveService
from services.test_analytics.ta_timeseries import get_flaky_tests_set, insert_testrun
from services.yaml import UserYaml, read_yaml_field


@dataclass
class TAProcInfo:
    repository: Repository
    branch: str | None
    user_yaml: UserYaml


def handle_file_not_found(upload: ReportSession):
    upload.state = "processed"
    upload.save()
    UploadError.objects.create(
        report_session=upload,
        error_code="file_not_in_storage",
        error_params={},
    )


def handle_parsing_error(upload: ReportSession, exc: Exception):
    sentry_sdk.capture_exception(exc, tags={"upload_state": upload.state})
    upload.state = "processed"
    upload.save()
    UploadError.objects.create(
        report_session=upload,
        error_code="unsupported_file_format",
        error_params={"error_message": str(exc)},
    )


def get_ta_processing_info(
    repoid: int,
    commitid: str,
    commit_yaml: dict[str, Any],
) -> TAProcInfo:
    repository = Repository.objects.get(repoid=repoid)

    commit = Commit.objects.get(repository=repository, commitid=commitid)
    branch = commit.branch
    if branch is None:
        raise ValueError("Branch is None")

    user_yaml: UserYaml = UserYaml(commit_yaml)
    return TAProcInfo(
        repository,
        branch,
        user_yaml,
    )


def should_delete_archive_settings(user_yaml: UserYaml) -> bool:
    if get_config("services", "minio", "expire_raw_after_n_days"):
        return True
    return not read_yaml_field(user_yaml, ("codecov", "archive", "uploads"), _else=True)


def rewrite_or_delete_upload(
    archive_service: ArchiveService,
    user_yaml: UserYaml,
    upload: ReportSession,
    readable_file: bytes,
):
    if should_delete_archive_settings(user_yaml):
        archive_url = upload.storage_path
        if archive_url and not archive_url.startswith("http"):
            archive_service.delete_file(archive_url)
    else:
        archive_service.write_file(upload.storage_path, bytes(readable_file))


def insert_testruns_timeseries(
    repoid: int,
    commitid: str,
    branch: str | None,
    upload: ReportSession,
    parsing_infos: list[test_results_parser.ParsingInfo],
):
    flaky_test_set = get_flaky_tests_set(repoid)

    for parsing_info in parsing_infos:
        insert_testrun(
            timestamp=upload.created_at,
            repo_id=repoid,
            commit_sha=commitid,
            branch=branch,
            upload_id=upload.id,
            flags=upload.flag_names,
            parsing_info=parsing_info,
            flaky_test_ids=flaky_test_set,
        )
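
To make the deletion policy above concrete: should_delete_archive_settings reports that raw uploads should be deleted either when the site-wide services.minio.expire_raw_after_n_days setting is configured or when the repository YAML turns off codecov.archive.uploads; otherwise rewrite_or_delete_upload keeps the upload and rewrites it. Below is a minimal sketch, assuming UserYaml wraps a plain dict exactly as get_ta_processing_info constructs it and that expire_raw_after_n_days is unset; the YAML dicts are illustrative inputs, not fixtures from this commit.

from services.test_analytics.ta_processing import should_delete_archive_settings
from services.yaml import UserYaml

# Repo YAML that opts out of keeping raw uploads -> the raw file gets deleted.
opt_out_yaml = UserYaml({"codecov": {"archive": {"uploads": False}}})
assert should_delete_archive_settings(opt_out_yaml) is True

# Default/empty YAML -> the raw file is kept and rewritten with the readable
# form, assuming services.minio.expire_raw_after_n_days is not configured.
default_yaml = UserYaml({})
assert should_delete_archive_settings(default_yaml) is False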
New file (80 additions, 0 deletions)

import logging
from typing import Any

from shared.django_apps.reports.models import ReportSession
from shared.storage.exceptions import FileNotInStorageError
from test_results_parser import parse_raw_upload

from services.archive import ArchiveService
from services.processing.types import UploadArguments
from services.test_analytics.ta_processing import (
    get_ta_processing_info,
    handle_file_not_found,
    handle_parsing_error,
    insert_testruns_timeseries,
    rewrite_or_delete_upload,
)

log = logging.getLogger(__name__)


def ta_processor_impl(
    repoid: int,
    commitid: str,
    commit_yaml: dict[str, Any],
    argument: UploadArguments,
    update_state: bool = False,
) -> bool:
    log.info(
        "Processing single TA argument",
        extra=dict(
            upload_id=argument.get("upload_id"),
            repoid=repoid,
            commitid=commitid,
        ),
    )

    upload_id = argument.get("upload_id")
    if upload_id is None:
        return False

    upload = ReportSession.objects.get(id=upload_id)
    if upload.state == "processed":
        # don't need to process again because the intermediate result should already be in redis
        return False

    if upload.storage_path is None:
        if update_state:
            handle_file_not_found(upload)
        return False

    ta_proc_info = get_ta_processing_info(repoid, commitid, commit_yaml)

    archive_service = ArchiveService(ta_proc_info.repository)

    try:
        payload_bytes = archive_service.read_file(upload.storage_path)
    except FileNotInStorageError:
        if update_state:
            handle_file_not_found(upload)
        return False

    try:
        parsing_infos, readable_file = parse_raw_upload(payload_bytes)
    except RuntimeError as exc:
        if update_state:
            handle_parsing_error(upload, exc)
        return False

    insert_testruns_timeseries(
        repoid, commitid, ta_proc_info.branch, upload, parsing_infos
    )

    if update_state:
        upload.state = "processed"
        upload.save()

        rewrite_or_delete_upload(
            archive_service, ta_proc_info.user_yaml, upload, readable_file
        )
    return True
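
For orientation, ta_processor_impl is the per-upload entry point: the caller packs the upload id into an UploadArguments mapping and decides via update_state whether this code path also persists state and UploadError rows. The call below is a hedged sketch of that shape; the module path, ids, and YAML are placeholders rather than values taken from this commit.

# Hedged usage sketch; assumes the module lands next to ta_processing as
# services.test_analytics.ta_processor, and uses placeholder ids/YAML.
from services.test_analytics.ta_processor import ta_processor_impl

handled = ta_processor_impl(
    repoid=1,
    commitid="0" * 40,                 # placeholder commit SHA
    commit_yaml={},                    # repository YAML as a plain dict
    argument={"upload_id": 1},         # UploadArguments carrying the upload id
    update_state=True,                 # persist state and UploadError rows
)
# False means the upload was skipped (missing id, already processed, file not
# in storage) or failed to parse; True means testruns were written to timeseries.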
New file (14 additions, 0 deletions)

import pytest
from shared.config import get_config
from shared.storage import get_appropriate_storage_service
from shared.storage.exceptions import BucketAlreadyExistsError


@pytest.fixture
def storage(mock_configuration):
    storage_service = get_appropriate_storage_service()
    try:
        storage_service.create_root_storage(get_config("services", "minio", "bucket"))
    except BucketAlreadyExistsError:
        pass
    return storage_service
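
A hedged sketch of a test that could build on this fixture follows. It assumes the shared.storage service exposes write_file/read_file keyed by bucket and path, the pattern ArchiveService relies on; the exact signatures should be checked against shared.storage before copying.

from shared.config import get_config


def test_storage_round_trip(storage):
    # Assumed signatures: write_file(bucket, path, data) / read_file(bucket, path).
    bucket = get_config("services", "minio", "bucket")
    storage.write_file(bucket, "test_analytics/raw_upload.txt", b"hello minio")
    assert storage.read_file(bucket, "test_analytics/raw_upload.txt") == b"hello minio"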
New file (11 additions, 0 deletions)

{
    "test_results_files": [
        {
            "filename": "codecov-demo/temp.junit.xml",
            "format": "base64+compressed",
            "data": "eJy1VMluwjAQvfMVI1dCoBbHZiklJEFVS4V66Kkqx8okBqw6i2KHwt/XWSChnCrRXDLjefNm8Uuc2T6UsOOpEnHkIooJAh75cSCijYsyve49oJnnaK60yoR5NWyIWMhdlBzyE5OWpnGqXGQY1kzILOXGoQjUl0gSHhSBItdFQ2OJPJdgMuqXjtIsTFzUJ/1Bj9IeuX+n1KZjmwwxoZSMDWwbK13W/HhZvC1fn5eLCZaxzyQq2/KZ4uBLplQJY4nAmocJNhA/k0zHKc5xn7WPqimKYxYEjc6Iad66DrHKVjplvv4f9jCTWiTy0GQnV2MPxE4E/Lxzz6muGMzFKbbJaZXiqQajIHBdIHjUvqFkCrcA31tugEUA2lJP11nkayM3eKobKIsA00D2lAz9CV9NSHujpx16B/3uievI9mceU/sChryAr6ExZKdrtwpw+VQjXeSVPVVjtubn6HoBp8iVlnDGd9VFtFpGFFYuCqsWgfVLFDg52ANiw2Mxpyk3zz94x6qU4DnWUW2VWfwkmrbyfgBbcXMH",
            "labels": ""
        }
    ],
    "metadata": {}
}
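
The data field above carries the raw test-results payload in the base64+compressed format; the leading eJy is a base64-encoded zlib header, so the fixture can be inspected with the standard library alone. The snippet below is an inspection sketch under that assumption (the local filename is hypothetical); inside the processor this decoding is handled by test_results_parser.parse_raw_upload, not by hand.

import base64
import json
import zlib

# Hypothetical local copy of the fixture above.
with open("raw_upload.json", "rb") as f:
    raw_upload = json.load(f)

for entry in raw_upload["test_results_files"]:
    # Assumes "base64+compressed" means base64-encoded zlib data.
    junit_xml = zlib.decompress(base64.b64decode(entry["data"]))
    print(entry["filename"], len(junit_xml), "bytes of JUnit XML")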
New file (38 additions, 0 deletions)

[
    {
        "timestamp": "2025-01-01T00:00:00+00:00",
        "test_id": "7a44f8a4b65ee2abd9617fc99a63fc2e",
        "name": "test_1_name",
        "classname": "test_1_classname",
        "testsuite": "test_1_testsuite",
        "computed_name": "test_1_computed_name",
        "outcome": "pass",
        "duration_seconds": 1.0,
        "failure_message": null,
        "framework": "Pytest",
        "filename": "test_1_file",
        "repo_id": 1,
        "commit_sha": "123",
        "branch": "main",
        "flags": [],
        "upload_id": 1
    },
    {
        "timestamp": "2025-01-01T00:00:00+00:00",
        "test_id": "25ce6e22db03ef4f4230eb999c776f99",
        "name": "test_2_name",
        "classname": "test_2_classname",
        "testsuite": "test_2_testsuite",
        "computed_name": "test_2",
        "outcome": "failure",
        "duration_seconds": 1.0,
        "failure_message": "test_2_failure_message",
        "framework": "Pytest",
        "filename": "test_2_file",
        "repo_id": 1,
        "commit_sha": "123",
        "branch": "main",
        "flags": [],
        "upload_id": 1
    }
]
