Commit 022c44b

Store v2 reports in GCS instead of Redis (#960)
1 parent 9f3c523 commit 022c44b

6 files changed (+62, -59 lines changed)

services/task/task.py

Lines changed: 4 additions & 0 deletions
@@ -134,6 +134,7 @@ def upload_signature(
     commitid,
     report_type=None,
     report_code=None,
+    arguments=None,
     debug=False,
     rebuild=False,
     immutable=False,
@@ -145,6 +146,7 @@ def upload_signature(
             commitid=commitid,
             report_type=report_type,
             report_code=report_code,
+            arguments=arguments,
             debug=debug,
             rebuild=rebuild,
         ),
@@ -157,6 +159,7 @@ def upload(
     commitid,
     report_type=None,
     report_code=None,
+    arguments=None,
     countdown=0,
     debug=False,
     rebuild=False,
@@ -166,6 +169,7 @@
         commitid,
         report_type=report_type,
         report_code=report_code,
+        arguments=arguments,
         debug=debug,
         rebuild=rebuild,
     ).apply_async(countdown=countdown)
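
For orientation, here is a minimal sketch of how the new `arguments` keyword rides along in the queued Celery signature, mirroring the change above. The task name, broker, and module layout below are illustrative assumptions, not code from this repository.

# Minimal sketch (assumed task name and broker) of threading `arguments`
# through a Celery signature.
from celery import Celery

app = Celery("sketch", broker="memory://")  # placeholder broker for illustration

def upload_signature(repoid, commitid, report_type=None, report_code=None, arguments=None):
    # The raw upload parameters are carried in the task kwargs, so the worker
    # no longer needs a separate lookup to recover them.
    return app.signature(
        "app.tasks.upload.Upload",  # illustrative task name
        kwargs=dict(
            repoid=repoid,
            commitid=commitid,
            report_type=report_type,
            report_code=report_code,
            arguments=arguments,
        ),
    )

def upload(repoid, commitid, countdown=0, **kwargs):
    # Queue the signature after an optional delay, as `.apply_async(countdown=...)`
    # does in the diff above.
    return upload_signature(repoid, commitid, **kwargs).apply_async(countdown=countdown)

Carrying the upload parameters in the task kwargs is what makes the Redis hand-off removed elsewhere in this commit unnecessary.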

upload/helpers.py

Lines changed: 1 addition & 12 deletions
@@ -687,18 +687,6 @@ def parse_headers(headers, upload_params):
     return {"content_type": content_type, "reduced_redundancy": reduced_redundancy}


-def store_report_in_redis(request, commitid, reportid, redis):
-    encoding = request.META.get("HTTP_X_CONTENT_ENCODING") or request.META.get(
-        "HTTP_CONTENT_ENCODING"
-    )
-    redis_key = (
-        f"upload/{commitid[:7]}/{reportid}/{'gzip' if encoding == 'gzip' else 'plain'}"
-    )
-    redis.setex(redis_key, 10800, request.body)
-
-    return redis_key
-
-
 def dispatch_upload_task(
     task_arguments,
     repository,
@@ -746,6 +734,7 @@ def dispatch_upload_task(
         commitid=commitid,
         report_type=str(report_type),
         report_code=task_arguments.get("report_code"),
+        arguments=task_arguments,
         countdown=max(
             countdown, int(get_config("setup", "upload_processing_delay") or 0)
         ),
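
The removed `store_report_in_redis` helper wrote the raw request body to Redis with a three-hour TTL (`redis.setex(key, 10800, body)`); after this commit the body goes straight to archive (GCS) storage under the same `v4/raw/...` layout used for v4 uploads. A rough sketch of the replacement behaviour, assuming a generic `archive_service` exposing the `write_file` call shown in the view diff further down:

# A rough sketch (not the repository's code): the v2 request body becomes a
# durable archive object instead of a Redis key with a three-hour TTL.
from datetime import datetime, timezone

def v2_report_path(repo_hash, commitid, reportid):
    # Same archive layout the v4 flow already uses for presigned PUTs.
    day = datetime.now(timezone.utc).strftime("%Y-%m-%d")
    return f"v4/raw/{day}/{repo_hash}/{commitid}/{reportid}.txt"

def store_v2_report(archive_service, body, encoding, repo_hash, commitid, reportid):
    # Replaces store_report_in_redis: write the raw bytes via the archive service
    # (backed by GCS) and hand the resulting path to the upload task as its `url`.
    path = v2_report_path(repo_hash, commitid, reportid)
    archive_service.write_file(path, body, is_already_gzipped=(encoding == "gzip"))
    return path

Unlike the Redis key, the stored object carries no expiry in this call, so the queued task can reference the report by path rather than racing a TTL.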

upload/tests/test_upload.py

Lines changed: 26 additions & 32 deletions
@@ -15,7 +15,8 @@
 from rest_framework import status
 from rest_framework.exceptions import NotFound, ValidationError
 from rest_framework.reverse import reverse
-from rest_framework.test import APIRequestFactory, APITestCase
+from rest_framework.test import APITestCase
+from shared.api_archive.archive import ArchiveService
 from shared.django_apps.core.tests.factories import OwnerFactory
 from shared.torngit.exceptions import (
     TorngitClientGeneralError,
@@ -37,7 +38,6 @@
     insert_commit,
     parse_headers,
     parse_params,
-    store_report_in_redis,
     validate_upload,
 )
 from upload.tokenless.tokenless import TokenlessUploadHandler
@@ -713,31 +713,6 @@ def test_parse_request_headers(self):
             {"version": "v4"},
         ) == {"content_type": "text/plain", "reduced_redundancy": True}

-    def test_store_report_in_redis(self):
-        redis = MockRedis()
-
-        with self.subTest("gzip encoding"):
-            assert (
-                store_report_in_redis(
-                    APIRequestFactory().get("", HTTP_X_CONTENT_ENCODING="gzip"),
-                    "1c78206f1a46dc6db8412a491fc770eb7d0f8a47",
-                    "report",
-                    redis,
-                )
-                == "upload/1c78206/report/gzip"
-            )
-
-        with self.subTest("plain encoding"):
-            assert (
-                store_report_in_redis(
-                    APIRequestFactory().get(""),
-                    "1c78206f1a46dc6db8412a491fc770eb7d0f8a47",
-                    "report",
-                    redis,
-                )
-                == "upload/1c78206/report/plain"
-            )
-
     def test_validate_upload_repository_moved(self):
         redis = MockRedis()
         owner = G(Owner, plan="users-free")
@@ -893,9 +868,10 @@ def test_dispatch_upload_task(self, upload):
         upload.assert_called_once_with(
             repoid=repo.repoid,
             commitid=task_arguments.get("commit"),
+            report_type="coverage",
             report_code="local_report",
+            arguments=task_arguments,
             countdown=4,
-            report_type="coverage",
         )


@@ -986,6 +962,7 @@ def test_invalid_request_params_invalid_package(self):

         assert response.status_code == status.HTTP_400_BAD_REQUEST

+    @patch("shared.api_archive.archive.ArchiveService.write_file")
     @patch("upload.views.legacy.get_redis_connection")
     @patch("upload.views.legacy.uuid4")
     @patch("upload.views.legacy.dispatch_upload_task")
@@ -997,6 +974,7 @@ def test_successful_upload_v2(
         mock_dispatch_upload,
         mock_uuid4,
         mock_get_redis,
+        mock_write_file,
     ):
         class MockRepoProviderAdapter:
             async def get_commit(self, commit, token):
@@ -1033,6 +1011,14 @@ async def get_commit(self, commit, token):
         )
         assert headers["content-type"] != "text/plain"

+        archive_service = ArchiveService(self.repo)
+        datetime = timezone.now().strftime("%Y-%m-%d")
+        repo_hash = archive_service.get_archive_hash(self.repo)
+        expected_url = f"v4/raw/{datetime}/{repo_hash}/b521e55aef79b101f48e2544837ca99a7fa3bf6b/dec1f00b-1883-40d0-afd6-6dcb876510be.txt"
+
+        mock_write_file.assert_called_with(
+            expected_url, b"coverage report", is_already_gzipped=False
+        )
         assert mock_dispatch_upload.call_args[0][0] == {
             "commit": "b521e55aef79b101f48e2544837ca99a7fa3bf6b",
             "token": "a03e5d02-9495-4413-b0d8-05651bb2e842",
@@ -1045,8 +1031,7 @@ async def get_commit(self, commit, token):
             "build_url": None,
             "branch": None,
             "reportid": "dec1f00b-1883-40d0-afd6-6dcb876510be",
-            "redis_key": "upload/b521e55/dec1f00b-1883-40d0-afd6-6dcb876510be/plain",
-            "url": None,
+            "url": expected_url,
             "job": None,
         }

@@ -1060,6 +1045,7 @@ async def get_commit(self, commit, token):
             == "https://app.codecov.io/github/codecovtest/upload-test-repo/commit/b521e55aef79b101f48e2544837ca99a7fa3bf6b"
         )

+    @patch("shared.api_archive.archive.ArchiveService.write_file")
     @patch("upload.views.legacy.get_redis_connection")
     @patch("upload.views.legacy.uuid4")
     @patch("upload.views.legacy.dispatch_upload_task")
@@ -1071,6 +1057,7 @@ def test_successful_upload_v2_slash(
         mock_dispatch_upload,
         mock_uuid4,
         mock_get_redis,
+        mock_write_file,
     ):
         class MockRepoProviderAdapter:
             async def get_commit(self, commit, token):
@@ -1107,6 +1094,14 @@ async def get_commit(self, commit, token):
         )
         assert headers["content-type"] != "text/plain"

+        archive_service = ArchiveService(self.repo)
+        datetime = timezone.now().strftime("%Y-%m-%d")
+        repo_hash = archive_service.get_archive_hash(self.repo)
+        expected_url = f"v4/raw/{datetime}/{repo_hash}/b521e55aef79b101f48e2544837ca99a7fa3bf6b/dec1f00b-1883-40d0-afd6-6dcb876510be.txt"
+
+        mock_write_file.assert_called_with(
+            expected_url, b"coverage report", is_already_gzipped=False
+        )
         assert mock_dispatch_upload.call_args[0][0] == {
             "commit": "b521e55aef79b101f48e2544837ca99a7fa3bf6b",
             "token": "a03e5d02-9495-4413-b0d8-05651bb2e842",
@@ -1119,8 +1114,7 @@ async def get_commit(self, commit, token):
             "build_url": None,
             "branch": None,
             "reportid": "dec1f00b-1883-40d0-afd6-6dcb876510be",
-            "redis_key": "upload/b521e55/dec1f00b-1883-40d0-afd6-6dcb876510be/plain",
-            "url": None,
+            "url": expected_url,
             "job": None,
         }
upload/tests/views/test_bundle_analysis.py

Lines changed: 6 additions & 3 deletions
@@ -87,9 +87,10 @@ def test_upload_bundle_analysis_success(db, client, mocker, mock_redis):
     upload.assert_called_with(
         commitid=commit_sha,
         repoid=repository.repoid,
-        countdown=4,
         report_code=None,
         report_type="bundle_analysis",
+        arguments=ANY,
+        countdown=4,
     )
     mock_metrics.assert_called_with(
         **{
@@ -176,9 +177,10 @@ def test_upload_bundle_analysis_success_shelter(db, client, mocker, mock_redis):
     upload.assert_called_with(
         commitid=commit_sha,
         repoid=repository.repoid,
-        countdown=4,
         report_code=None,
         report_type="bundle_analysis",
+        arguments=ANY,
+        countdown=4,
     )
     mock_metrics.assert_called_with(
         **{
@@ -260,9 +262,10 @@ def test_upload_bundle_analysis_existing_commit(db, client, mocker, mock_redis):
     upload.assert_called_with(
         commitid=commit.commitid,
         repoid=repository.repoid,
-        countdown=4,
         report_code=None,
         report_type="bundle_analysis",
+        arguments=ANY,
+        countdown=4,
     )
     mock_metrics.assert_called_with(
         **{

upload/tests/views/test_test_results.py

Lines changed: 6 additions & 3 deletions
@@ -74,8 +74,10 @@ def test_upload_test_results(db, client, mocker, mock_redis):

     # saves args in Redis
     redis = get_redis_connection()
-    args = redis.rpop(f"uploads/{repository.repoid}/{commit_sha}/test_results")
-    assert json.loads(args) == {
+    args = json.loads(
+        redis.rpop(f"uploads/{repository.repoid}/{commit_sha}/test_results")
+    )
+    assert args == {
         "reportid": reportid,
         "build": "test-build",
         "build_url": "test-build-url",
@@ -95,9 +97,10 @@ def test_upload_test_results(db, client, mocker, mock_redis):
     upload.assert_called_with(
         commitid=commit_sha,
         repoid=repository.repoid,
-        countdown=4,
         report_code=None,
         report_type="test_results",
+        arguments=args,
+        countdown=4,
     )
     mock_prometheus_metrics.assert_called_with(
         **{

upload/views/legacy.py

Lines changed: 19 additions & 9 deletions
@@ -35,7 +35,6 @@
     insert_commit,
     parse_headers,
     parse_params,
-    store_report_in_redis,
     validate_upload,
 )
 from upload.metrics import API_UPLOAD_COUNTER
@@ -201,8 +200,9 @@ def post(self, request, *args, **kwargs):
         # --------- Handle the actual upload

         reportid = str(uuid4())
-        path = None  # populated later for v4 uploads when generating presigned PUT url
-        redis_key = None  # populated later for v2 uploads when storing report in Redis
+        # populated later for `v4` uploads when generating presigned PUT url,
+        # or by `v2` uploads when storing the report directly
+        path = None

         # Get the url where the commit details can be found on the Codecov site, we'll return this in the response
         destination_url = f"{settings.CODECOV_DASHBOARD_URL}/{owner.service}/{owner.username}/{repository.name}/commit/{commitid}"
@@ -212,7 +212,7 @@ def post(self, request, *args, **kwargs):
         repo_hash = archive_service.get_archive_hash(repository)
         default_path = f"v4/raw/{datetime}/{repo_hash}/{commitid}/{reportid}.txt"

-        # v2 - store request body in redis
+        # v2 - store request body directly
         if version == "v2":
             log.info(
                 "Started V2 upload",
@@ -224,15 +224,22 @@ def post(self, request, *args, **kwargs):
                     upload_params=upload_params,
                 ),
             )
-            redis_key = store_report_in_redis(request, commitid, reportid, redis)
+
+            path = default_path
+            encoding = request.META.get("HTTP_X_CONTENT_ENCODING") or request.META.get(
+                "HTTP_CONTENT_ENCODING"
+            )
+            archive_service.write_file(
+                path, request.body, is_already_gzipped=(encoding == "gzip")
+            )

             log.info(
-                "Stored coverage report in redis",
+                "Stored coverage report",
                 extra=dict(
                     commit=commitid,
                     upload_params=upload_params,
                     reportid=reportid,
-                    redis_key=redis_key,
+                    path=path,
                     repoid=repository.repoid,
                 ),
             )
@@ -271,6 +278,10 @@ def post(self, request, *args, **kwargs):
             path = default_path

         try:
+            # When using shelter (`is_shelter_request`), the returned `upload_url` is being
+            # ignored, as shelter is handling the creation of a "presigned put" matching the
+            # `storage_path`.
+            # This code runs here just for backwards compatibility reasons:
             upload_url = archive_service.create_presigned_put(default_path)
         except Exception as e:
             log.warning(
@@ -311,10 +322,9 @@ def post(self, request, *args, **kwargs):
            **queue_params,
            "build_url": build_url,
            "reportid": reportid,
-            "redis_key": redis_key,  # location of report for v2 uploads; this will be "None" for v4 uploads
            "url": (
                path
-                if path  # If a path was generated for a v4 upload, pass that to the 'url' field, potentially overwriting it
+                if path  # If a path was generated for an upload, pass that to the 'url' field, potentially overwriting it
                else upload_params.get("url")
            ),
            # These values below might be different from the initial request parameters, so overwrite them here to ensure they're up-to-date