Commit 32929e1

Merge pull request #21 from RachelTucker/merge_4_1
Merging 4.1 API
2 parents 09f68db + 4516c45 commit 32929e1

File tree

ds3/ds3.py
tests/clientTests.py

2 files changed: +88, -11 lines

ds3/ds3.py

Lines changed: 63 additions & 6 deletions
@@ -291,8 +291,10 @@ def __init__(self):
         self.attributes = []
         self.elements = {
             'Activated': None,
+            'AllowNewJobRequests': None,
             'AutoActivateTimeoutInMins': None,
             'AutoInspect': None,
+            'CacheAvailableRetryAfterInSeconds': None,
             'DefaultImportConflictResolutionMode': None,
             'DefaultVerifyDataAfterImport': None,
             'DefaultVerifyDataPriorToImport': None,
@@ -1030,6 +1032,7 @@ def __init__(self):
             'MfgSerialNumber': None,
             'PartitionId': None,
             'Quiesced': None,
+            'ReservedTaskType': None,
             'SerialNumber': None,
             'State': None,
             'TapeId': None,
@@ -1073,6 +1076,8 @@ def __init__(self):
             'Id': None,
             'ImportExportConfiguration': None,
             'LibraryId': None,
+            'MinimumReadReservedDrives': None,
+            'MinimumWriteReservedDrives': None,
             'Name': None,
             'Quiesced': None,
             'SerialId': None,
@@ -1485,6 +1490,8 @@ def __init__(self):
             'Id': None,
             'ImportExportConfiguration': None,
             'LibraryId': None,
+            'MinimumReadReservedDrives': None,
+            'MinimumWriteReservedDrives': None,
             'Name': None,
             'Quiesced': None,
             'SerialId': None,
@@ -1758,6 +1765,8 @@ def __init__(self):
             'Id': None,
             'ImportExportConfiguration': None,
             'LibraryId': None,
+            'MinimumReadReservedDrives': None,
+            'MinimumWriteReservedDrives': None,
             'Name': None,
             'Quiesced': None,
             'SerialId': None,
@@ -3036,14 +3045,18 @@ def __init__(self, full_details=None):
 
 class ModifyDataPathBackendSpectraS3Request(AbstractRequest):
 
-    def __init__(self, activated=None, auto_activate_timeout_in_mins=None, auto_inspect=None, default_import_conflict_resolution_mode=None, default_verify_data_after_import=None, default_verify_data_prior_to_import=None, partially_verify_last_percent_of_tapes=None, unavailable_media_policy=None, unavailable_pool_max_job_retry_in_mins=None, unavailable_tape_partition_max_job_retry_in_mins=None):
+    def __init__(self, activated=None, allow_new_job_requests=None, auto_activate_timeout_in_mins=None, auto_inspect=None, cache_available_retry_after_in_seconds=None, default_import_conflict_resolution_mode=None, default_verify_data_after_import=None, default_verify_data_prior_to_import=None, partially_verify_last_percent_of_tapes=None, unavailable_media_policy=None, unavailable_pool_max_job_retry_in_mins=None, unavailable_tape_partition_max_job_retry_in_mins=None):
         super(ModifyDataPathBackendSpectraS3Request, self).__init__()
         if activated is not None:
             self.query_params['activated'] = activated
+        if allow_new_job_requests is not None:
+            self.query_params['allow_new_job_requests'] = allow_new_job_requests
         if auto_activate_timeout_in_mins is not None:
             self.query_params['auto_activate_timeout_in_mins'] = auto_activate_timeout_in_mins
         if auto_inspect is not None:
             self.query_params['auto_inspect'] = auto_inspect
+        if cache_available_retry_after_in_seconds is not None:
+            self.query_params['cache_available_retry_after_in_seconds'] = cache_available_retry_after_in_seconds
         if default_import_conflict_resolution_mode is not None:
             self.query_params['default_import_conflict_resolution_mode'] = default_import_conflict_resolution_mode
         if default_verify_data_after_import is not None:
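
A minimal usage sketch for the two new data path backend settings. The request class and query parameter names come straight from the hunk above; the createClientFromEnv helper and the modify_data_path_backend_spectra_s3 client call are assumptions about the SDK's usual request/response pattern, not part of this diff.

    from ds3 import ds3

    client = ds3.createClientFromEnv()  # assumed: credentials read from environment variables

    # Pause new job intake and tell clients to retry after 300 seconds when the cache is unavailable.
    request = ds3.ModifyDataPathBackendSpectraS3Request(
        allow_new_job_requests=False,
        cache_available_retry_after_in_seconds=300)

    # Only explicitly supplied values become query parameters.
    assert request.query_params['allow_new_job_requests'] is False
    assert request.query_params['cache_available_retry_after_in_seconds'] == 300

    client.modify_data_path_backend_spectra_s3(request)  # assumed client method name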
@@ -6423,7 +6436,7 @@ def __init__(self, tape_drive_id):
 
 class GetTapeDrivesSpectraS3Request(AbstractRequest):
 
-    def __init__(self, last_page=None, page_length=None, page_offset=None, page_start_marker=None, partition_id=None, serial_number=None, state=None, type=None):
+    def __init__(self, last_page=None, page_length=None, page_offset=None, page_start_marker=None, partition_id=None, reserved_task_type=None, serial_number=None, state=None, type=None):
         super(GetTapeDrivesSpectraS3Request, self).__init__()
         if last_page is not None:
             self.query_params['last_page'] = last_page
@@ -6435,6 +6448,8 @@ def __init__(self, last_page=None, page_length=None, page_offset=None, page_star
             self.query_params['page_start_marker'] = page_start_marker
         if partition_id is not None:
             self.query_params['partition_id'] = partition_id
+        if reserved_task_type is not None:
+            self.query_params['reserved_task_type'] = reserved_task_type
         if serial_number is not None:
             self.query_params['serial_number'] = serial_number
         if state is not None:
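
A short sketch of the new reserved_task_type filter on GetTapeDrivesSpectraS3Request. The parameter name is from the hunks above; the 'READ' value and the get_tape_drives_spectra_s3 client call are illustrative assumptions, since the accepted task types are defined server-side rather than in this diff.

    # List tape drives reserved for read tasks ('READ' is assumed here for illustration).
    request = ds3.GetTapeDrivesSpectraS3Request(reserved_task_type='READ')
    assert request.query_params['reserved_task_type'] == 'READ'
    response = client.get_tape_drives_spectra_s3(request)  # assumed client method name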
@@ -6740,20 +6755,26 @@ def __init__(self, quiesced):
 
 class ModifyTapeDriveSpectraS3Request(AbstractRequest):
 
-    def __init__(self, tape_drive_id, quiesced=None):
+    def __init__(self, tape_drive_id, quiesced=None, reserved_task_type=None):
         super(ModifyTapeDriveSpectraS3Request, self).__init__()
         self.tape_drive_id = tape_drive_id
         if quiesced is not None:
             self.query_params['quiesced'] = quiesced
+        if reserved_task_type is not None:
+            self.query_params['reserved_task_type'] = reserved_task_type
         self.path = '/_rest_/tape_drive/' + tape_drive_id
         self.http_verb = HttpVerb.PUT
 
 
 class ModifyTapePartitionSpectraS3Request(AbstractRequest):
 
-    def __init__(self, tape_partition, quiesced=None):
+    def __init__(self, tape_partition, minimum_read_reserved_drives=None, minimum_write_reserved_drives=None, quiesced=None):
         super(ModifyTapePartitionSpectraS3Request, self).__init__()
         self.tape_partition = tape_partition
+        if minimum_read_reserved_drives is not None:
+            self.query_params['minimum_read_reserved_drives'] = minimum_read_reserved_drives
+        if minimum_write_reserved_drives is not None:
+            self.query_params['minimum_write_reserved_drives'] = minimum_write_reserved_drives
         if quiesced is not None:
             self.query_params['quiesced'] = quiesced
         self.path = '/_rest_/tape_partition/' + tape_partition
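
A hedged sketch of the new tape drive and tape partition settings. Constructor parameters and query parameter names match the diff; the identifiers, the 'WRITE' task type, and the client method names are assumptions for illustration.

    # Reserve a specific drive for a task type (illustrative values).
    drive_request = ds3.ModifyTapeDriveSpectraS3Request('drive-id', reserved_task_type='WRITE')
    assert drive_request.query_params['reserved_task_type'] == 'WRITE'  # quiesced omitted, so not sent

    # Require at least one read drive and two write drives on the partition.
    partition_request = ds3.ModifyTapePartitionSpectraS3Request(
        'partition-name-or-id',
        minimum_read_reserved_drives=1,
        minimum_write_reserved_drives=2)

    client.modify_tape_drive_spectra_s3(drive_request)          # assumed client method name
    client.modify_tape_partition_spectra_s3(partition_request)  # assumed client method name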
@@ -7957,11 +7978,47 @@ def process_response(self, response):
 
 
 class HeadObjectResponse(AbstractResponse):
-
+    def __init__(self, response, request):
+        self.blob_checksums = {}
+        self.blob_checksum_type = 'NONE'
+        super(HeadObjectResponse, self).__init__(response, request)
+
+    def __process_checksum_headers(self, headers):
+        """
+        Processes the blob checksum headers.
+        :param headers: list of tuples containing the Http response headers
+        """
+        self.__process_checksum_type(headers)
+        self.__process_blob_checksums(headers)
+
+    def __process_checksum_type(self, headers):
+        """
+        Parses the blob checksum type header. If there is no header, the default is NONE.
+        If there are multiple headers, then an error is raised
+        :param headers: list of tuples containing the Http response headers
+        """
+        checksum_type_header = [item for item in headers if item[0] == 'ds3-blob-checksum-type']
+        if len(checksum_type_header) == 0:
+            return
+        if len(checksum_type_header) > 1:
+            raise ValueError("Expected only one header with key 'ds3-blob-checksum-type' but got: " + str(checksum_type_header))
+        self.blob_checksum_type = checksum_type_header[0][1]
+
+    def __process_blob_checksums(self, headers):
+        """
+        Parses the blob checksum headers and adds them to a dictionary which maps
+        blob offset to blob checksum.
+        :param headers: list of tuples containing the Http response headers
+        """
+        # Retrieves all the headers that start with 'ds3-blob-checksum-offset-'
+        # and converts the offset at the end of the header key into an integer.
+        checksum_list = [(int(key[25:]), val) for key, val in headers if key.startswith('ds3-blob-checksum-offset-')]
+        self.blob_checksums = dict(checksum_list)
+
     def process_response(self, response):
         self.__check_status_codes__([200, 403, 404])
-        self.status_code = self.response.status
         if self.response.status == 200:
+            self.__process_checksum_headers(response.getheaders())
             self.result = HeadRequestStatus.EXISTS
         elif self.response.status == 403:
             self.result = HeadRequestStatus.NOTAUTHORIZED
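
For orientation, a sketch of how the new checksum fields might be read after a HEAD call. HeadObjectRequest, HeadRequestStatus, blob_checksum_type, and blob_checksums are all part of this change; the client.head_object call is assumed to be the SDK's existing HEAD entry point.

    response = client.head_object(ds3.HeadObjectRequest(bucket_name='my-bucket', object_name='my-object'))  # assumed call
    if response.result == ds3.HeadRequestStatus.EXISTS and response.blob_checksum_type == 'MD5':
        # blob_checksums maps the byte offset of each blob to its checksum string.
        for offset in sorted(response.blob_checksums):
            print(offset, response.blob_checksums[offset])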

tests/clientTests.py

Lines changed: 25 additions & 5 deletions
@@ -466,12 +466,9 @@ def testDeleteObjectUnicode(self):
     def testDeleteObjectBadInput(self):
         """tests deleteObject: bad input to function"""
         self.createBucket(bucketName)
-
-        noNameBucket = ""
+
         notHereBucket = "not-here"
-        badBuckets = {DeleteObjectRequest(noNameBucket, ""):statusCodeList(400),
-                      DeleteObjectRequest(noNameBucket, "badFile"):statusCodeList(404),
-                      DeleteObjectRequest(notHereBucket, ""): statusCodeList(404),
+        badBuckets = {DeleteObjectRequest(notHereBucket, ""): statusCodeList(404),
                       DeleteObjectRequest(notHereBucket, "badFile"): statusCodeList(404),
                       DeleteObjectRequest(bucketName, ""): statusCodeList(400),
                       DeleteObjectRequest(bucketName, "badFile"): statusCodeList(404)}
@@ -1459,3 +1456,26 @@ def testMarkSuspectBlobS3TargetsAsDegradedRequestPayload(self):
     def testMarkSuspectBlobTapesAsDegradedRequestPayload(self):
         request = MarkSuspectBlobTapesAsDegradedSpectraS3Request(id_list=self.__get_test_ids())
         self.assertEqual(request.body, self.__get_marshaled_ids())
+
+    def testHeadObject(self):
+        bucket_name = "test-bucket"
+        object_name = "test-object"
+        request = HeadObjectRequest(bucket_name=bucket_name, object_name=object_name)
+
+        response_headers = [
+            ('x-amz-meta-key', 'value'),
+            ('ds3-blob-checksum-type', 'MD5'),
+            ('ds3-blob-checksum-offset-0', '4nQGNX4nyz0pi8Hvap79PQ=='),
+            ('ds3-blob-checksum-offset-10485760', '965Aa0/n8DlO1IwXYFh4bg=='),
+            ('ds3-blob-checksum-offset-20971520', 'iV2OqJaXJ/jmqgRSb1HmFA==')
+        ]
+
+        mocked_response = MockedHttpResponse(200, headers=response_headers)
+
+        response = HeadObjectResponse(mocked_response, request)
+
+        self.assertEqual(response.blob_checksum_type, 'MD5')
+        self.assertEqual(response.blob_checksums, {
+            0: '4nQGNX4nyz0pi8Hvap79PQ==',
+            10485760: '965Aa0/n8DlO1IwXYFh4bg==',
+            20971520: 'iV2OqJaXJ/jmqgRSb1HmFA=='})
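
The offsets in the test above are byte positions of each blob (10485760 bytes is 10 MiB), and the 'MD5' type with 24-character '=='-padded values suggests base64-encoded MD5 digests. A small sketch, not part of this change, of how one might verify downloaded blob data against one of those values under that assumption:

    import base64
    import hashlib

    def blob_matches(blob_bytes, expected_checksum):
        # Compare the base64-encoded MD5 digest of the blob data with the value
        # reported in the corresponding ds3-blob-checksum-offset-<offset> header.
        digest = base64.b64encode(hashlib.md5(blob_bytes).digest()).decode('utf-8')
        return digest == expected_checksum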
