Skip to content

Commit fdb1d69

Browse files
committed
Better moving system
1 parent 0cfa4af commit fdb1d69

File tree

3 files changed

+67
-99
lines changed

3 files changed

+67
-99
lines changed

archive/README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,6 @@
33
Builds are stored on a Cloudflare R2 instance at `https://builds.rebootfn.org/versions.json`.
44
If you want to move them to another AWS-compatible object storage, run:
55
```
6-
move.ps1
6+
python move.py
77
```
88
and provide the required parameters.

archive/move.ps1

Lines changed: 0 additions & 98 deletions
This file was deleted.

archive/move.py

Lines changed: 66 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,66 @@
1+
import argparse
2+
import os
3+
import requests
4+
import boto3
5+
6+
from concurrent.futures import ThreadPoolExecutor, as_completed
7+
from urllib.parse import urlparse
8+
9+
def upload_url_to_s3(s3_client, bucket_name, url, object_key):
    """Stream the file at *url* directly into s3://<bucket_name>/<object_key>.

    The download is streamed so the file is never fully buffered in memory.
    Returns the ``(url, object_key)`` pair so callers can report which
    transfer completed.
    """
    # NOTE(review): verify=False disables TLS certificate verification —
    # acceptable only if the download origin is trusted; confirm intentional.
    # The "_c_t_c=1" cookie appears to be required by the origin server.
    resp = requests.get(url, stream=True, verify=False, headers={"Cookie": "_c_t_c=1"})
    resp.raise_for_status()
    # Hand the raw response stream straight to boto3's managed uploader.
    s3_client.upload_fileobj(resp.raw, bucket_name, object_key)
    return url, object_key
14+
15+
def derive_key_from_url(url, prefix=None):
    """Derive an S3 object key from a download URL.

    The key is the final path component of the URL (query string and
    fragment are ignored), optionally namespaced under *prefix*.

    Args:
        url: Source URL to derive the key from.
        prefix: Optional key prefix; when given, the key is
            "<prefix>/<filename>".

    Returns:
        The object key string.
    """
    parsed = urlparse(url)
    filename = os.path.basename(parsed.path)
    # Bug fix: previously returned a literal placeholder instead of the
    # derived filename when a prefix was supplied, so every prefixed upload
    # collided on the same key.
    if prefix:
        return f"{prefix}/{filename}"
    else:
        return filename
22+
23+
def main():
    """Parse CLI arguments and upload every listed URL to S3 concurrently.

    Reads one URL per non-blank line from the versions file, derives an
    object key for each, and fans the uploads out across a thread pool,
    printing a status line as each transfer finishes or fails.
    """
    parser = argparse.ArgumentParser(description="Upload multiple URLs from versions.txt to an S3 bucket concurrently.")
    parser.add_argument('--bucket', required=True, help="Name of the S3 bucket.")
    parser.add_argument('--concurrency', required=True, type=int, help="Number of concurrent uploads.")
    parser.add_argument('--versions-file', default='versions.txt', help="File containing one URL per line.")
    parser.add_argument('--access-key', required=True, help="AWS Access Key ID.")
    parser.add_argument('--secret-key', required=True, help="AWS Secret Access Key.")
    parser.add_argument('--endpoint-url', required=True, help="Custom endpoint URL for S3 or S3-compatible storage.")
    args = parser.parse_args()

    # One URL per line; blank lines are skipped.
    with open(args.versions_file, 'r') as fh:
        targets = [raw.strip() for raw in fh if raw.strip()]

    print(f"Uploading {len(targets)} files...")

    # Forward credentials / endpoint to boto3 only when supplied.
    client_kwargs = {}
    if args.access_key and args.secret_key:
        client_kwargs['aws_access_key_id'] = args.access_key
        client_kwargs['aws_secret_access_key'] = args.secret_key
    if args.endpoint_url:
        client_kwargs['endpoint_url'] = args.endpoint_url

    client = boto3.client('s3', **client_kwargs)
    bucket_name = args.bucket

    with ThreadPoolExecutor(max_workers=args.concurrency) as pool:
        # Submit every transfer up front, then report completions as they land.
        pending = [
            pool.submit(upload_url_to_s3, client, bucket_name, target,
                        derive_key_from_url(target))
            for target in targets
        ]
        for done in as_completed(pending):
            try:
                uploaded_url, uploaded_key = done.result()
                print(f"Uploaded: {uploaded_url} -> s3://{bucket_name}/{uploaded_key}")
            except Exception as e:
                print(f"Error uploading: {e}")
64+
65+
# Script entry point: run the uploader only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()

0 commit comments

Comments
 (0)