Skip to content

Commit 5f752c2

Browse files
gursewak1997 authored and jlebon committed
Factor out the S3 functions to cosalib
Since we use s3_check_exists and s3_copy in other places, we factored those functions out into cosalib.
1 parent 0d94b7c commit 5f752c2

File tree

2 files changed

+57
-62
lines changed

2 files changed

+57
-62
lines changed

src/cmd-buildupload

Lines changed: 2 additions & 61 deletions
Original file line numberDiff line numberDiff line change
@@ -10,8 +10,7 @@ import sys
1010
import tempfile
1111
import subprocess
1212
import boto3
13-
from botocore.exceptions import ClientError, NoCredentialsError
14-
from tenacity import retry
13+
from cosalib.s3 import s3_copy, s3_check_exists
1514

1615
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
1716

@@ -22,13 +21,7 @@ CACHE_MAX_AGE_ARTIFACT = 60 * 60 * 24 * 365
2221
# set metadata caching to 5m
2322
CACHE_MAX_AGE_METADATA = 60 * 5
2423
from cosalib.builds import Builds, BUILDFILES
25-
from cosalib.cmdlib import (
26-
load_json,
27-
retry_stop_long,
28-
retry_wait_long,
29-
retry_boto_exception,
30-
retry_callback
31-
)
24+
from cosalib.cmdlib import load_json
3225

3326

3427
def main():
@@ -194,57 +187,5 @@ def s3_upload_build(s3_client, args, builddir, bucket, prefix):
194187
dry_run=args.dry_run)
195188

196189

197-
@retry(stop=retry_stop_long, wait=retry_wait_long,
198-
retry=retry_boto_exception, before_sleep=retry_callback)
199-
def s3_check_exists(s3_client, bucket, key, dry_run=False):
200-
print(f"Checking if bucket '{bucket}' has key '{key}'")
201-
try:
202-
s3_client.head_object(Bucket=bucket, Key=key)
203-
except ClientError as e:
204-
if e.response['Error']['Code'] == '404':
205-
return False
206-
raise e
207-
except NoCredentialsError as e:
208-
# It's reasonable to run without creds if doing a dry-run
209-
if dry_run:
210-
return False
211-
raise e
212-
return True
213-
214-
215-
@retry(stop=retry_stop_long, wait=retry_wait_long,
216-
retry=retry_boto_exception, retry_error_callback=retry_callback)
217-
def s3_copy(s3_client, src, bucket, key, max_age, acl, extra_args={}, dry_run=False):
218-
extra_args = dict(extra_args)
219-
if 'ContentType' not in extra_args:
220-
if key.endswith('.json'):
221-
extra_args['ContentType'] = 'application/json'
222-
elif key.endswith('.tar'):
223-
extra_args['ContentType'] = 'application/x-tar'
224-
elif key.endswith('.xz'):
225-
extra_args['ContentType'] = 'application/x-xz'
226-
elif key.endswith('.gz'):
227-
extra_args['ContentType'] = 'application/gzip'
228-
elif key.endswith('.iso'):
229-
extra_args['ContentType'] = 'application/x-iso9660-image'
230-
else:
231-
# use a standard MIME type for "binary blob" instead of the default
232-
# 'binary/octet-stream' AWS slaps on
233-
extra_args['ContentType'] = 'application/octet-stream'
234-
upload_args = {
235-
'CacheControl': f'max-age={max_age}',
236-
'ACL': acl
237-
}
238-
upload_args.update(extra_args)
239-
240-
print((f"{'Would upload' if dry_run else 'Uploading'} {src} to "
241-
f"s3://{bucket}/{key} with args {upload_args}"))
242-
243-
if dry_run:
244-
return
245-
246-
s3_client.upload_file(Filename=src, Bucket=bucket, Key=key, ExtraArgs=upload_args)
247-
248-
249190
if __name__ == '__main__':
250191
sys.exit(main())

src/cosalib/s3.py

Lines changed: 55 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,10 @@
11
import boto3
22

3-
from botocore.exceptions import ClientError
3+
from botocore.exceptions import ClientError, NoCredentialsError
44
from cosalib.cmdlib import (
55
retry_stop,
6+
retry_stop_long,
7+
retry_wait_long,
68
retry_boto_exception,
79
retry_callback
810
)
@@ -52,3 +54,55 @@ def delete_object(self, bucket, key):
5254
print("S3: deleting {sub_objects}")
5355
self.client.delete_objects(Bucket=bucket, Delete=sub_objects)
5456
self.client.delete_object(Bucket=bucket, Key=key)
57+
58+
59+
@retry(stop=retry_stop_long, wait=retry_wait_long,
       retry=retry_boto_exception, before_sleep=retry_callback)
def s3_check_exists(s3_client, bucket, key, dry_run=False):
    """Return True if `key` exists in `bucket`, False if it does not.

    A 404 from HeadObject maps to False; on a dry run, missing AWS
    credentials also map to False. Any other error propagates.
    """
    print(f"Checking if bucket '{bucket}' has key '{key}'")
    try:
        s3_client.head_object(Bucket=bucket, Key=key)
        return True
    except ClientError as e:
        # Only a plain 404 means "not there"; anything else is a real failure.
        if e.response['Error']['Code'] != '404':
            raise
        return False
    except NoCredentialsError:
        # It's reasonable to run without creds if doing a dry-run
        if not dry_run:
            raise
        return False
75+
76+
77+
@retry(stop=retry_stop_long, wait=retry_wait_long,
       retry=retry_boto_exception, before_sleep=retry_callback)
def s3_copy(s3_client, src, bucket, key, max_age, acl, extra_args=None, dry_run=False):
    """Upload the local file `src` to s3://{bucket}/{key}.

    Sets `CacheControl` from `max_age` and applies the canned ACL `acl`.
    A `ContentType` is inferred from the key's extension unless the
    caller supplies one via `extra_args`. With `dry_run`, the intended
    upload is printed but not performed.

    NOTE(review): the decorator previously used
    `retry_error_callback=retry_callback`; in tenacity that callback's
    return value REPLACES the final exception once retries are
    exhausted, silently swallowing upload failures. Use `before_sleep`
    instead, matching s3_check_exists.
    """
    # Copy defensively: never mutate the caller's dict, and avoid the
    # shared-mutable-default-argument pitfall.
    extra_args = dict(extra_args or {})
    if 'ContentType' not in extra_args:
        if key.endswith('.json'):
            extra_args['ContentType'] = 'application/json'
        elif key.endswith('.tar'):
            extra_args['ContentType'] = 'application/x-tar'
        elif key.endswith('.xz'):
            extra_args['ContentType'] = 'application/x-xz'
        elif key.endswith('.gz'):
            extra_args['ContentType'] = 'application/gzip'
        elif key.endswith('.iso'):
            extra_args['ContentType'] = 'application/x-iso9660-image'
        else:
            # use a standard MIME type for "binary blob" instead of the default
            # 'binary/octet-stream' AWS slaps on
            extra_args['ContentType'] = 'application/octet-stream'
    upload_args = {
        'CacheControl': f'max-age={max_age}',
        'ACL': acl
    }
    upload_args.update(extra_args)

    # Print the full effective args (upload_args), not just extra_args,
    # so dry runs show exactly what would be sent.
    print((f"{'Would upload' if dry_run else 'Uploading'} {src} to "
           f"s3://{bucket}/{key} with args {upload_args}"))

    if dry_run:
        return

    s3_client.upload_file(Filename=src, Bucket=bucket, Key=key, ExtraArgs=upload_args)

0 commit comments

Comments
 (0)