Commit f53bb77

Implement retries since our mirror worker seems to be flaky

1 parent 25b55a5 commit f53bb77

File tree

1 file changed: +48 -0 lines changed

packages/script.py

Lines changed: 48 additions & 0 deletions
@@ -9,6 +9,8 @@
 import re
 import sys
 import hashlib
+import time
+import requests
 from datetime import datetime
 
 import import_tests
@@ -89,14 +91,60 @@ def upload_to_r2(tag, dist = Path("dist")):
         aws_secret_access_key = os.environ.get("R2_SECRET_ACCESS_KEY"),
         region_name="auto")
 
+    files_remaining = []
+
     # upload entire dist directory to r2
     for root, dirs, files in os.walk(dist):
         for file in files:
             path = Path(root) / file
             key = tag + "/" + str(path.relative_to(dist))
+            files_remaining.append((path, key))
+
+    # attempt to upload each file 5 times. If after 5 attempts the file is still not accessible at pyodide.edgeworker.net then give up
+    for i in range(5):
+        for (path, key) in files_remaining:
             print(f"uploading {path} to {key}")
             s3.upload_file(str(path), "python-package-bucket", key)
 
+        new_files_remaining = []
+
+        time.sleep(10)
+
+        for (path, key) in files_remaining:
+            # Construct URL to fetch the uploaded file
+            url = f"https://pyodide.edgeworker.net/python-package-bucket/{key}"
+            print(f"Checking {url}")
+
+            try:
+                # Download the file content from the URL
+                response = requests.get(url)
+                response.raise_for_status()  # Raise an exception if the status is not 200 OK
+
+                # Read the local file content
+                with open(path, 'rb') as f:
+                    local_content = f.read()
+
+                # Compare contents
+                if local_content == response.content:
+                    print(f"{path} uploaded successfully.")
+                else:
+                    print(f"Content mismatch for {path}. Retrying...")
+                    new_files_remaining.append((path, key))
+            except requests.exceptions.RequestException as e:
+                print(f"Failed to verify {path}: {e}. Retrying...")
+                new_files_remaining.append((path, key))
+
+        files_remaining = new_files_remaining
+
+        if not files_remaining:
+            break
+
+        # delete the objects that failed verification before re-uploading them
+        for (path, key) in files_remaining:
+            s3.delete_object(Bucket="python-package-bucket", Key=key)
+
+    if files_remaining:
+        raise Exception("Failed to upload packages after 5 attempts: ", files_remaining)
 
 # converts all the .zip wheels into .tar.gz format (destructively)
 def convert_wheels_to_tar_gz(dist = Path("dist")):
     with open(dist / "pyodide-lock.json", "r") as file:
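
For reference, the verification step in this commit amounts to fetching each uploaded object back over HTTP and byte-comparing it with the local file. Below is a minimal standalone sketch of that check; the verify_uploaded helper name and the example key layout are illustrative assumptions, not part of the commit.

import requests
from pathlib import Path

# Hypothetical helper (not part of this commit): returns True when the object
# served at `url` is byte-for-byte identical to the local file at `path`.
def verify_uploaded(path: Path, url: str) -> bool:
    try:
        response = requests.get(url)
        response.raise_for_status()  # treat any non-2xx status as a failure
    except requests.exceptions.RequestException:
        return False
    return path.read_bytes() == response.content

# Example usage (assumed tag/key layout for illustration):
# verify_uploaded(Path("dist/foo.whl"),
#                 "https://pyodide.edgeworker.net/python-package-bucket/<tag>/foo.whl")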
