1+ import hashlib
12import logging
23import os
4+ import boto3
5+ from botocore .exceptions import ClientError
36import pecan
47from pecan import response
58from pecan .secure import secure
@@ -26,6 +29,7 @@ def __init__(self, arch):
2629 self .distro_version = request .context ['distro_version' ]
2730 self .ref = request .context ['ref' ]
2831 self .sha1 = request .context ['sha1' ]
32+ self .checksum = None
2933 request .context ['arch' ] = self .arch
3034
3135 @expose (generic = True , template = 'json' )
@@ -89,7 +93,7 @@ def index_post(self):
8993 if request .POST .get ('force' , False ) is False :
9094 error ('/errors/invalid' , 'resource already exists and "force" key was not used' )
9195
92- full_path = self .save_file (file_obj )
96+ full_path , size = self .save_file (file_obj )
9397
9498 if self .binary is None :
9599 path = full_path
@@ -102,14 +106,21 @@ def index_post(self):
102106 self .binary = Binary (
103107 self .binary_name , self .project , arch = arch ,
104108 distro = distro , distro_version = distro_version ,
105- ref = ref , sha1 = sha1 , path = path , size = os .path .getsize (path )
109+ ref = ref , sha1 = sha1 , path = path , size = size ,
110+ checksum = self .checksum
106111 )
107112 else :
108113 self .binary .path = full_path
114+ self .binary .checksum = self .checksum
109115
110116 # check if this binary is interesting for other configured projects,
111117 # and if so, then mark those other repos so that they can be re-built
112118 self .mark_related_repos ()
119+
120+ # Remove the local file after S3 upload
121+ if pecan .conf .storage_method == 's3' :
122+ os .remove (full_path )
123+
113124 return dict ()
114125
115126 def mark_related_repos (self ):
@@ -175,8 +186,41 @@ def save_file(self, file_obj):
175186 for chunk in file_iterable :
176187 f .write (chunk )
177188
189+ self .checksum = self .generate_checksum (destination )
190+
191+ if pecan .conf .storage_method == 's3' :
192+ bucket = pecan .conf .bucket
193+ object_destination = os .path .relpath (destination , pecan .conf .binary_root )
194+
195+ s3_client = boto3 .client ('s3' )
196+ try :
197+ with open (destination , 'rb' ) as f :
198+ s3_client .put_object (Body = f ,
199+ Bucket = bucket ,
200+ Key = object_destination ,
201+ ChecksumAlgorithm = 'sha256' ,
202+ ChecksumSHA256 = self .checksum
203+ )
204+ except ClientError as e :
205+ error ('/errors/error/' , 'file object upload to S3 failed with error %s' % e )
206+
207+ size = os .path .getsize (destination )
208+
178209 # return the full path to the saved object:
179- return destination
210+ return destination , size
211+
212+ def generate_checksum (self , binary ):
213+ # S3 requires SHA256
214+ chsum = None
215+ if pecan .conf .storage_method == 's3' :
216+ chsum = hashlib .sha256 ()
217+ else :
218+ chsum = hashlib .sha512 ()
219+
220+ with open (binary , 'rb' ) as f :
221+ for chunk in iter (lambda : f .read (4096 ), b'' ):
222+ chsum .update (chunk )
223+ return chsum .hexdigest ()
180224
181225 @expose ()
182226 def _lookup (self , name , * remainder ):
0 commit comments