1
- import os
1
+ import hashlib
2
2
import json
3
- from pathlib import Path
3
+ import os
4
+ import re
5
+ import subprocess
6
+ import sys
7
+ import tarfile
4
8
import tempfile
5
9
import zipfile
6
- import tarfile
7
- import uuid
10
+ from pathlib import Path
11
+ from textwrap import dedent
12
+
8
13
import boto3
9
14
import re
10
15
import sys
11
16
import hashlib
12
17
import time
13
18
import requests
14
- from datetime import datetime
15
19
16
20
import import_tests
17
21
18
def normalize(name: str) -> str:
    """Return the canonical (PEP 503-style) form of a package name.

    Runs of ``-``, ``_`` and ``.`` collapse to a single ``-`` and the
    result is lowercased, so e.g. ``Foo_Bar.baz`` -> ``foo-bar-baz``.
    """
    collapsed = re.sub(r"[-_.]+", "-", name)
    return collapsed.lower()
20
25
26
+
21
27
# See setup.sh
22
28
# prerequisite: emsdk, pyodide, packages -> pyodide/packages
23
29
24
- def gen_bzl_config (tag , dist ):
25
- bucket_url = "https://pyodide.edgeworker.net/python-package-bucket/" + tag + "/"
26
- github_url = "https://github.com/cloudflare/pyodide-build-scripts/releases/download/" + tag + "/"
30
+
31
+ def gen_bzl_config (tag : str , dist : Path ) -> None :
32
+ bucket_url = f"https://pyodide.edgeworker.net/python-package-bucket/{ tag } /"
33
+ github_url = (
34
+ f"https://github.com/cloudflare/pyodide-build-scripts/releases/download/{ tag } /"
35
+ )
27
36
lock_bytes = (dist / "pyodide-lock.json" ).read_bytes ()
28
37
lock_hash = hashlib .sha256 (lock_bytes ).hexdigest ()
29
38
zip_bytes = (dist / "pyodide_packages.tar.zip" ).read_bytes ()
@@ -33,24 +42,36 @@ def gen_bzl_config(tag, dist):
33
42
34
43
with open (dist / "pyodide-lock.json" , "r" ) as file :
35
44
lock = json .load (file )
36
- packages = [package ["name" ] for package in lock ["packages" ].values ()]
37
- imports_to_test = import_tests .gen (packages )
38
-
39
- with open ("pyodide_bucket.bzl" , "w" ) as f :
40
- f .write ("# Do not edit this file by hand. See docs/pyodide.md for info on how to generate it.\n " )
41
- f .write ("# These variables are factored out here because they are being shared by the WORKSPACE files in\n " )
42
- f .write ("# both edgeworker and workerd, as well as src/pyodide/BUILD.bazel\n " )
43
- f .write ("PYODIDE_PACKAGE_BUCKET_URL = \" " + bucket_url + "\" \n " )
44
- f .write ("PYODIDE_GITHUB_RELEASE_URL = \" " + github_url + "\" \n " )
45
- f .write ("PYODIDE_LOCK_SHA256 = \" " + lock_hash + "\" \n " )
46
- f .write ("PYODIDE_PACKAGES_TAR_ZIP_SHA256 = \" " + zip_hash + "\" \n " )
47
- f .write ("PYODIDE_ALL_WHEELS_ZIP_SHA256 = \" " + all_wheels_hash + "\" \n \n " )
48
- f .write ("# IMPORTANT: when updating this file in git, check the diff to make sure none of the imports below are being removed unexpectedly\n " )
49
- f .write ("PYODIDE_IMPORTS_TO_TEST = " + json .dumps (imports_to_test , indent = 3 , sort_keys = True ) + "\n " )
45
+ packages = [package ["name" ] for package in lock ["packages" ].values ()]
46
+ imports_to_test = import_tests .gen (packages )
47
+
48
+ Path ("pyodide_bucket.bzl" ).write_text (
49
+ dedent (
50
+ f"""
51
+ # Do not edit this file by hand. See docs/pyodide.md for info on how to generate it.
52
+ # These variables are factored out here because they are being shared by the WORKSPACE files in
53
+ # both edgeworker and workerd, as well as src/pyodide/BUILD.bazel
54
+
55
+ PYODIDE_PACKAGE_BUCKET_URL = "{ bucket_url } "
56
+ PYODIDE_GITHUB_RELEASE_URL = "{ github_url } "
57
+ PYODIDE_LOCK_SHA256 = "{ lock_hash } "
58
+ PYODIDE_PACKAGES_TAR_ZIP_SHA256 = "{ zip_hash } "
59
+ PYODIDE_ALL_WHEELS_ZIP_SHA256 = "{ all_wheels_hash } "
60
+
61
+ # IMPORTANT: when updating this file in git, check the diff to make sure none of the imports below are being removed unexpectedly
62
+
63
+ PYODIDE_IMPORTS_TO_TEST =
64
+ """
65
+ ).strip ()
66
+ + " "
67
+ + json .dumps (imports_to_test , indent = 3 , sort_keys = True )
68
+ + "\n "
69
+ )
70
+
50
71
51
72
# creates a package bundle .tar.zip file to be bundled in with edgeworker
52
73
# the resulting bundle is written to dist/pyodide_packages.tar.zip
53
- def make_bundle (tag , dist = Path ("dist" )):
74
+ def make_bundle (tag : str , dist : Path = Path ("dist" )) -> None :
54
75
with open (dist / "pyodide-lock.json" , "r" ) as file :
55
76
lock = json .load (file )
56
77
with tempfile .TemporaryDirectory (delete = False ) as t :
@@ -62,8 +83,8 @@ def make_bundle(tag, dist = Path("dist")):
62
83
for package in lock ["packages" ].values ():
63
84
name = normalize (package ["name" ])
64
85
print ("untarring " + name )
65
- os . mkdir (tempdir / name )
66
- if name .endswith ("-tests" ) or name == "test" :
86
+ (tempdir / name ). mkdir ( )
87
+ if name .endswith ("-tests" ) or name == "test" :
67
88
continue
68
89
file = dist / package ["file_name" ]
69
90
with tarfile .open (file , "r:gz" ) as zip :
@@ -72,27 +93,37 @@ def make_bundle(tag, dist = Path("dist")):
72
93
with tarfile .open (tempdir / "pyodide_packages.tar" , "w" ) as tar :
73
94
tar .add (tempdir , arcname = "./" )
74
95
# create zip file in dist/ from tarfile
75
- with zipfile .ZipFile (dist / "pyodide_packages.tar.zip" , "w" , compression = zipfile .ZIP_DEFLATED ) as zip :
96
+ with zipfile .ZipFile (
97
+ dist / "pyodide_packages.tar.zip" , "w" , compression = zipfile .ZIP_DEFLATED
98
+ ) as zip :
76
99
zip .write (tempdir / "pyodide_packages.tar" , "pyodide_packages.tar" )
77
100
# create all_wheels.zip file for testing
78
- with zipfile .ZipFile (dist / "all_wheels.zip" , "w" , compression = zipfile .ZIP_DEFLATED ) as zip :
101
+ with zipfile .ZipFile (
102
+ dist / "all_wheels.zip" , "w" , compression = zipfile .ZIP_DEFLATED
103
+ ) as zip :
79
104
for package in lock ["packages" ].values ():
80
105
file = dist / package ["file_name" ]
81
106
zip .write (file , f"{ package ['file_name' ]} " )
82
-
107
+
83
108
gen_bzl_config (tag , dist )
84
109
110
+
85
111
# uploads everything in dist to python-package-bucket at tag/...
86
- def upload_to_r2 (tag , dist = Path ("dist" )):
112
+ def upload_to_r2 (tag : str , dist = Path ("dist" )) -> None :
87
113
# upload to r2
88
- s3 = boto3 .client ("s3" ,
89
- endpoint_url = "https://" + os .environ .get ("R2_ACCOUNT_ID" ) + ".r2.cloudflarestorage.com" ,
90
- aws_access_key_id = os .environ .get ("R2_ACCESS_KEY_ID" ),
91
- aws_secret_access_key = os .environ .get ("R2_SECRET_ACCESS_KEY" ),
92
- region_name = "auto" )
93
-
114
+ r2_account_id = os .environ ["R2_ACCOUNT_ID" ]
115
+ r2_access_key = os .environ .get ("R2_ACCESS_KEY_ID" )
116
+ r2_secret_access_key = os .environ .get ("R2_SECRET_ACCESS_KEY" )
117
+ s3 = boto3 .client (
118
+ "s3" ,
119
+ endpoint_url = f"https://{ r2_account_id } .r2.cloudflarestorage.com" ,
120
+ aws_access_key_id = r2_access_key ,
121
+ aws_secret_access_key = r2_secret_access_key ,
122
+ region_name = "auto" ,
123
+ )
124
+
94
125
files_remaining = []
95
-
126
+
96
127
# upload entire dist directory to r2, excluding all_wheels.zip and pyodide_packages.tar.zip
97
128
for root , dirs , files in os .walk (dist ):
98
129
for file in files :
@@ -101,30 +132,30 @@ def upload_to_r2(tag, dist = Path("dist")):
101
132
path = Path (root ) / file
102
133
key = tag + "/" + str (path .relative_to (dist ))
103
134
files_remaining .append ((path , key ))
104
-
135
+
105
136
# attempt to upload each file 5 times. If after 5 attempts the file is still not accessible at pyodide.edgeworker.net then give up
106
137
ATTEMPTS = 5
107
138
for i in range (ATTEMPTS ):
108
- for ( path , key ) in files_remaining :
139
+ for path , key in files_remaining :
109
140
print (f"uploading { path } to { key } " )
110
141
s3 .upload_file (str (path ), "python-package-bucket" , key )
111
142
112
143
new_files_remaining = []
113
144
114
145
time .sleep (10 )
115
146
116
- for ( path , key ) in files_remaining :
147
+ for path , key in files_remaining :
117
148
# Construct URL to fetch the uploaded file
118
149
url = f"https://pyodide.edgeworker.net/python-package-bucket/{ key } "
119
150
print (f"Checking { url } " )
120
-
151
+
121
152
try :
122
153
# Download the file content from the URL
123
154
response = requests .get (url )
124
155
response .raise_for_status () # Raise an exception if the status is not 200 OK
125
-
156
+
126
157
# Read the local file content
127
- with open (path , 'rb' ) as f :
158
+ with open (path , "rb" ) as f :
128
159
local_content = f .read ()
129
160
130
161
# Compare contents
@@ -143,19 +174,19 @@ def upload_to_r2(tag, dist = Path("dist")):
143
174
break
144
175
145
176
if i != ATTEMPTS - 1 :
146
- for ( path , key ) in files_remaining :
177
+ for path , key in files_remaining :
147
178
s3 .delete_object (Bucket = "python-package-bucket" , Key = key )
148
179
149
180
if files_remaining :
150
181
raise Exception ("Failed to upload packages after 5 attempts: " , files_remaining )
151
182
183
+
152
184
# converts all the .zip wheels into .tar.gz format (destructively)
153
- def convert_wheels_to_tar_gz (dist = Path ("dist" )):
185
+ def convert_wheels_to_tar_gz (dist : Path = Path ("dist" )) -> None :
154
186
with open (dist / "pyodide-lock.json" , "r" ) as file :
155
187
lock = json .load (file )
156
-
188
+
157
189
for package in lock ["packages" ].values ():
158
- name = normalize (package ["name" ])
159
190
file = dist / package ["file_name" ]
160
191
# check file ends with .zip or .whl
161
192
if not (file .name .endswith (".zip" ) or file .name .endswith (".whl" )):
@@ -169,16 +200,17 @@ def convert_wheels_to_tar_gz(dist = Path("dist")):
169
200
# create tar.gz file from tempdir
170
201
with tarfile .open (new_file , "w:gz" ) as tar :
171
202
tar .add (tempdir , arcname = "./" )
172
- os . remove ( file )
203
+ file . unlink ( )
173
204
package ["file_name" ] = new_file .name
174
205
# update sha256 hash
175
206
new_file_bytes = new_file .read_bytes ()
176
207
new_file_hash = hashlib .sha256 (new_file_bytes ).hexdigest ()
177
208
package ["sha256" ] = new_file_hash
178
-
209
+
179
210
with open (dist / "pyodide-lock.json" , "w" ) as file :
180
211
json .dump (lock , file )
181
212
213
+
182
214
if __name__ == "__main__" :
183
215
if len (sys .argv ) != 2 :
184
216
print ("Usage: python script.py <tag>" )
@@ -187,11 +219,14 @@ def convert_wheels_to_tar_gz(dist = Path("dist")):
187
219
188
220
with open ("required_packages.txt" , "r" ) as file :
189
221
required_packages = file .read ().split ("\n " )
190
- status = os .system (f"pyodide build-recipes --install { ' ' .join (required_packages )} " )
191
- if status != 0 :
192
- raise Exception ("Failed to build recipes" )
193
-
222
+ result = subprocess .run (
223
+ ["pyodide" , "build-recipes" , "--install" , * required_packages ]
224
+ )
225
+ if result .returncode != 0 :
226
+ print ("Failed to build recipes" , file = sys .stderr )
227
+ sys .exit (result .returncode )
228
+
194
229
convert_wheels_to_tar_gz ()
195
-
230
+
196
231
make_bundle (tag )
197
- upload_to_r2 (tag )
232
+ upload_to_r2 (tag )
0 commit comments