diff --git a/ds3/ds3network.py b/ds3/ds3network.py
--- a/ds3/ds3network.py
+++ b/ds3/ds3network.py
@@ -207,7 +207,10 @@ def send_request(self, request):
         if key == 'Content-Length':
             # Add to headers, but not to amz-headers
             headers[key] = value
+        elif key == 'Range':
+            # Range is a transport header, not a payload checksum:
+            # forward it, but never record it as the request checksum
+            # (the checksum participates in request signing).
+            headers[key] = value
         elif key in ['Content-CRC32', 'Content-CRC32C', 'Content-MD5', 'Content-SHA256', 'Content-SHA512']:
             headers[key] = value
             checksum = value
         elif not key.startswith('x-amz-meta-'):
diff --git a/samples/getDataRange.py b/samples/getDataRange.py
new file mode 100644
--- /dev/null
+++ b/samples/getDataRange.py
@@ -0,0 +1,32 @@
+import os
+import tempfile
+import time
+
+from ds3 import ds3
+
+
+# This example retrieves the byte range 53687091100-53687091115 of an object
+# in a specific bucket. The output is written to a temporary file.
+
+client = ds3.Client("<endpoint>", ds3.Credentials("<access_id>", "<secret_key>"))
+
+bucketName = "<bucket>"
+fileName = "<object>"
+
+fd, tempname = tempfile.mkstemp()
+print(tempname)
+
+# Create a GetObjectRequest and set the Range header to retrieve only those
+# bytes. Multiple ranges may be specified, separated by commas.
+# Example: req.headers['Range'] = 'bytes=0-1,3-4'
+with open(tempname, "wb") as f:
+    req = ds3.GetObjectRequest(bucketName, fileName, f)
+    req.headers['Range'] = 'bytes=53687091100-53687091115'
+    start_time = time.time()
+    getObjectResult = client.get_object(request=req)
+
+os.close(fd)
+
+print(getObjectResult.response.status)
+elapsed = time.time() - start_time
+print(f"Total elapsed time: {elapsed} seconds")
diff --git a/samples/multipartupload.py b/samples/multipartupload.py
new file mode 100644
--- /dev/null
+++ b/samples/multipartupload.py
@@ -0,0 +1,46 @@
+import os
+
+from ds3 import ds3
+
+
+# This example uploads a file to a bucket with the S3 multipart-upload API:
+# initiate the upload, send the file in PART_SIZE chunks, then complete it.
+
+client = ds3.createClientFromEnv()
+
+# Change the following values to match your environment.
+# PART_SIZE is the size of each upload part in bytes.
+PART_SIZE = 5 * 1024 * 1024
+BUCKET_NAME = "books"
+OBJECT_KEY = "beowulf.txt"
+FILE_PATH = "beowulf.txt"
+
+# First step is to initiate the multipart upload. This returns the uploadId
+# that every subsequent part request must carry.
+req = ds3.InitiateMultiPartUploadRequest(BUCKET_NAME, OBJECT_KEY)
+res = client.initiate_multi_part_upload(req)
+uploadId = res.result['UploadId']
+parts = []
+
+with open(FILE_PATH, "rb") as file:
+    part_number = 1
+    while True:
+        chunk = file.read(PART_SIZE)
+        if not chunk:
+            break
+
+        part_req = ds3.PutMultiPartUploadPartRequest(
+            bucket_name=BUCKET_NAME,
+            object_name=OBJECT_KEY,
+            part_number=part_number,
+            upload_id=uploadId,
+            request_payload=chunk
+        )
+        part_res = client.put_multi_part_upload_part(part_req)
+        # Record each part's number and ETag: both are required to complete.
+        parts.append(ds3.Part(part_number=part_number, etag=part_res.response.getheader('ETag')))
+        part_number += 1
+
+# The object is not visible until the upload is completed with the part list.
+complete_req = ds3.CompleteMultiPartUploadRequest(BUCKET_NAME, OBJECT_KEY, parts, uploadId)
+complete_res = client.complete_multi_part_upload(complete_req)
+print(complete_res.result)