Skip to content

Commit fca8a2d

Browse files
Releasing version 3.20.3
Releasing version 3.20.3
2 parents 1b9bfcb + ad80f19 commit fca8a2d

File tree

13 files changed

+387
-286
lines changed

CHANGELOG.rst

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,9 +6,15 @@ All notable changes to this project will be documented in this file.
66

77
The format is based on `Keep a Changelog <http://keepachangelog.com/>`__.
88

9-
3.20.2 - 2022-11-15
9+
3.20.3 - 2022-11-22
1010
--------------------
11+
Fixed
12+
~~~~~
1113

14+
* Upgraded the cryptography version to (>=3.2.1,<39.0.0) to fix the `OpenSSL Security bug <https://www.openssl.org/blog/blog/2022/11/01/email-address-overflows/>`_
15+
16+
3.20.2 - 2022-11-15
17+
--------------------
1218
Added
1319
~~~~~
1420

THIRD_PARTY_LICENSES.txt

Lines changed: 265 additions & 195 deletions
Large diffs are not rendered by default.

requirements.txt

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,13 +8,13 @@ certifi
88
cffi>=1.9.1
99
click==7.1.2
1010
coverage==4.5.2
11-
cryptography>=3.2.1,<=37.0.2
11+
cryptography>=3.2.1,<39.0.0
1212
httpsig-cffi==15.0.0
1313
Jinja2==3.0.3
1414
jmespath==0.10.0
1515
ndg-httpsclient==0.4.2
1616
mock==2.0.0
17-
oci==2.88.1
17+
oci==2.88.2
1818
packaging==20.2
1919
pluggy==0.13.0
2020
py==1.10.0

scripts/install/install.py

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -304,13 +304,12 @@ def install_cli(install_dir, tmp_dir, version, optional_features, dependency_dir
304304
# Check if we should install a local full-install bundle.
305305
oci_cli_whl_files = glob.glob(match_wheel)
306306
if os.path.exists('./' + dependency_dir) and len(oci_cli_whl_files) > 0:
307+
dependency_dir = DEFAULT_DEPENDENCY_DIR + '/python{}{}.html'.format(sys.version_info.major, sys.version_info.minor)
307308
print_status("Installing {} from local resources.".format(oci_cli_whl_files[0]))
308309
cmd = [path_to_pip, 'install', '--cache-dir', tmp_dir, oci_cli_whl_files[0], '--upgrade', '--find-links', dependency_dir]
309310

310311
elif OFFLINE_INSTALL:
311-
# # Since cffi is a 4th party library which cryptography uses, it needs to be installed first in the offline installation
312-
# cmd = [path_to_pip, 'install', 'cffi', '--find-links', dependency_dir, '--no-index']
313-
# exec_command(cmd, env=env)
312+
dependency_dir = DEFAULT_DEPENDENCY_DIR + '/python{}{}.html'.format(sys.version_info.major, sys.version_info.minor)
314313
cmd = [path_to_pip, 'install', cli_package_name, '--find-links', dependency_dir, '--no-index', '--ignore-requires-python']
315314
else:
316315
cmd = [path_to_pip, 'install', '--cache-dir', tmp_dir, cli_package_name, '--upgrade']
@@ -626,9 +625,7 @@ def main():
626625
global OFFLINE_INSTALL
627626
OFFLINE_INSTALL = args.offline_install
628627
dependency_dir = args.dependency_dir
629-
if dependency_dir is None and OFFLINE_INSTALL:
630-
dependency_dir = DEFAULT_DEPENDENCY_DIR + '/python{}{}'.format(sys.version_info.major, sys.version_info.minor)
631-
elif dependency_dir is None:
628+
if dependency_dir is None:
632629
dependency_dir = DEFAULT_DEPENDENCY_DIR
633630
global DRY_RUN
634631
DRY_RUN = args.dry_run
Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
# coding: utf-8
2+
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
3+
4+
import os
5+
import shutil
6+
7+
8+
def remove_dir_at_path(path):
9+
if os.path.exists(path):
10+
shutil.rmtree(path)

services/object_storage/tests/integ/test_object_storage_sync_download.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -932,7 +932,8 @@ def create_empty_folders_local(path, no_of_folders_to_create):
932932

933933
def cleanup_files_from_local(file_set):
934934
for file in file_set:
935-
os.remove(file)
935+
if os.path.exists(file):
936+
os.remove(file)
936937

937938

938939
def create_new_objects_remote(client, bucket_name, no_of_files_to_create, with_content=False, extension='txt',

services/object_storage/tests/integ/test_object_storage_sync_upload.py

Lines changed: 27 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@
1616
from test_object_storage_sync_download import parse_dry_run_result, \
1717
generate_random_string, create_new_objects_remote, create_new_files_local, cleanup_files_from_local
1818
from tests import util
19+
import services.object_storage.tests.common.util as object_storage_util
1920

2021
OBJECTS_TO_CREATE_IN_REMOTE_FOR_SYNC = 20
2122

@@ -30,6 +31,10 @@
3031
'': []
3132
}
3233

34+
new_path = 'new_path'
35+
new_files_set = 'new_files_set'
36+
new_dir = 'new_dir'
37+
3338

3439
@pytest.fixture(params=[False])
3540
def debug(request):
@@ -86,6 +91,14 @@ def teardown(debug):
8691
sync_upload_bucket_name, '--force'], debug=debug)
8792

8893

94+
@pytest.fixture()
95+
def cleanup_new_content_set():
96+
data = {new_files_set: {}, new_dir: ''}
97+
yield data
98+
cleanup_files_from_local(data[new_files_set])
99+
object_storage_util.remove_dir_at_path(data[new_dir])
100+
101+
89102
@util.skip_while_rerecording
90103
def test_sync_src_dry_run(object_storage_client, debug):
91104
"""
@@ -162,7 +175,7 @@ def test_sync_src_updated_objects(object_storage_client):
162175

163176

164177
@util.skip_while_rerecording
165-
def test_sync_src_new_objects(object_storage_client):
178+
def test_sync_src_new_objects(object_storage_client, cleanup_new_content_set):
166179
"""
167180
1. Upload the files to remote and validate the output and file contents
168181
2. Create a new file in local
@@ -176,6 +189,7 @@ def test_sync_src_new_objects(object_storage_client):
176189
new_object_name = 'a/new_object'
177190
new_file_contents = {new_object_name: bulk_operation.generate_random_string(bulk_operation.CONTENT_STRING_LENGTH)}
178191
new_file_path = os.path.join(sync_upload_test_dir, new_object_name)
192+
cleanup_new_content_set[new_files_set] = {new_file_path}
179193
with open(new_file_path, 'w') as fh:
180194
fh.write(new_file_contents[new_object_name])
181195

@@ -425,7 +439,7 @@ def test_sync_src_with_delete_paging(object_storage_client, debug):
425439

426440

427441
@util.skip_while_rerecording
428-
def test_sync_src_with_delete_and_include(object_storage_client):
442+
def test_sync_src_with_delete_and_include(object_storage_client, cleanup_new_content_set):
429443
"""
430444
1. Create new set of objects in local with .pdf and .doc extensions.
431445
2. Perform a dry run with --delete to include only *.pdf and validate that only .pdf files are transferred.
@@ -439,6 +453,8 @@ def test_sync_src_with_delete_and_include(object_storage_client):
439453

440454
l_file_set_1 = create_new_files_local(sync_upload_test_dir, 3, extension='pdf')
441455
l_file_set_2 = create_new_files_local(sync_upload_test_dir, 5, extension='doc')
456+
cleanup_new_content_set[new_files_set] = l_file_set_1.union(l_file_set_2)
457+
cleanup_new_content_set[new_dir] = os.path.join(sync_upload_test_dir, 'dir')
442458

443459
result = bulk_operation.invoke(['os', 'object', 'sync', '--namespace', util.NAMESPACE, '--bucket-name',
444460
sync_upload_bucket_name, '--src-dir', sync_upload_test_dir, '--delete',
@@ -479,11 +495,9 @@ def test_sync_src_with_delete_and_include(object_storage_client):
479495
assert parsed_result['skipped-objects'] == []
480496
assert set(parsed_result['deleted-objects']) == r_obj_set_1
481497

482-
cleanup_files_from_local(l_file_set_1.union(l_file_set_2))
483-
484498

485499
@util.skip_while_rerecording
486-
def test_sync_src_with_delete_and_exclude(object_storage_client, debug):
500+
def test_sync_src_with_delete_and_exclude(object_storage_client, debug, cleanup_new_content_set):
487501
"""
488502
1. Create new set of objects in local with .pdf and .doc extensions.
489503
2. Perform a dry run with --delete to exclude only *.pdf and validate that all other files are transferred.
@@ -497,6 +511,8 @@ def test_sync_src_with_delete_and_exclude(object_storage_client, debug):
497511

498512
l_file_set_1 = create_new_files_local(sync_upload_test_dir, 3, extension='pdf')
499513
l_file_set_2 = create_new_files_local(sync_upload_test_dir, 5, extension='doc')
514+
cleanup_new_content_set[new_files_set] = l_file_set_1.union(l_file_set_2)
515+
cleanup_new_content_set[new_dir] = os.path.join(sync_upload_test_dir, 'dir')
500516

501517
result = bulk_operation.invoke(['os', 'object', 'sync', '--namespace', util.NAMESPACE, '--bucket-name',
502518
sync_upload_bucket_name, '--src-dir', sync_upload_test_dir, '--delete',
@@ -537,8 +553,6 @@ def test_sync_src_with_delete_and_exclude(object_storage_client, debug):
537553
assert parsed_result['skipped-objects'] == []
538554
assert set(parsed_result['deleted-objects']) == r_obj_set_2
539555

540-
cleanup_files_from_local(l_file_set_1.union(l_file_set_2))
541-
542556

543557
@util.skip_while_rerecording
544558
def test_sync_src_with_delete_and_prefix(object_storage_client, debug):
@@ -601,7 +615,7 @@ def test_sync_src_with_delete_and_prefix(object_storage_client, debug):
601615

602616

603617
@util.skip_while_rerecording
604-
def test_sync_src_with_delete_include_and_prefix(object_storage_client, debug):
618+
def test_sync_src_with_delete_include_and_prefix(object_storage_client, debug, cleanup_new_content_set):
605619
"""
606620
Assert that scope of file transfer and delete is only limited to --include pattern within the bucket prefix
607621
@@ -621,6 +635,8 @@ def test_sync_src_with_delete_include_and_prefix(object_storage_client, debug):
621635

622636
l_file_set_1 = create_new_files_local(sync_upload_test_dir, 6, extension='pdf')
623637
l_file_set_2 = create_new_files_local(sync_upload_test_dir, 4, extension='doc')
638+
cleanup_new_content_set[new_files_set] = l_file_set_1.union(l_file_set_2)
639+
cleanup_new_content_set[new_dir] = os.path.join(sync_upload_test_dir, 'dir')
624640

625641
result = bulk_operation.invoke(['os', 'object', 'sync', '--namespace', util.NAMESPACE, '--bucket-name',
626642
sync_upload_bucket_name, '--src-dir', sync_upload_test_dir, '--prefix',
@@ -663,11 +679,10 @@ def test_sync_src_with_delete_include_and_prefix(object_storage_client, debug):
663679
assert set(parsed_result['uploaded-objects']) == set([(os.path.join(_prefix, f[len(sync_upload_test_dir):].replace(os.sep, '/').strip('/'))) for f in l_file_set_1.union(l_file_set_2)])
664680
assert parsed_result['skipped-objects'] == []
665681
assert set(parsed_result['deleted-objects']) == r_obj_set_1
666-
cleanup_files_from_local(l_file_set_1.union(l_file_set_2))
667682

668683

669684
@util.skip_while_rerecording
670-
def test_sync_src_with_delete_exclude_and_prefix(object_storage_client, debug):
685+
def test_sync_src_with_delete_exclude_and_prefix(object_storage_client, debug, cleanup_new_content_set):
671686
"""
672687
Assert that scope of file transfer and delete is only limited to --exclude pattern within the bucket prefix
673688
@@ -686,6 +701,8 @@ def test_sync_src_with_delete_exclude_and_prefix(object_storage_client, debug):
686701
_prefix = 'upload_prefix_exclude_delete/'
687702
l_file_set_1 = create_new_files_local(sync_upload_test_dir, 6, extension='pdf')
688703
l_file_set_2 = create_new_files_local(sync_upload_test_dir, 4, extension='doc')
704+
cleanup_new_content_set[new_files_set] = l_file_set_1.union(l_file_set_2)
705+
cleanup_new_content_set[new_dir] = os.path.join(sync_upload_test_dir, 'dir')
689706

690707
result = bulk_operation.invoke(['os', 'object', 'sync', '--namespace', util.NAMESPACE, '--bucket-name',
691708
sync_upload_bucket_name, '--src-dir', sync_upload_test_dir, '--prefix',
@@ -728,7 +745,6 @@ def test_sync_src_with_delete_exclude_and_prefix(object_storage_client, debug):
728745
assert set(parsed_result['uploaded-objects']) == set([(os.path.join(_prefix, o).replace(os.sep, '/')) for o in sync_local_object_content.keys()])
729746
assert parsed_result['skipped-objects'] == []
730747
assert set(parsed_result['deleted-objects']) == r_obj_set_2
731-
cleanup_files_from_local(l_file_set_1.union(l_file_set_2))
732748

733749

734750
@util.skip_while_rerecording

services/object_storage/tests/unit/test_objectstorage_extended.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -469,7 +469,7 @@ def test_preauth_request_create(self):
469469
assert "Error: Missing option(s) --bucket-name, --name, --access-type, --time-expires." in result.output
470470

471471
result = util.invoke_command(['os', 'preauth-request', 'create', '--bucket-name', 'b001', '--name', 'par', '--access-type', 'new-access', '--time-expires', '2017-09-15T20:30:00.123456Z'])
472-
assert "Error: Invalid value for '--access-type': invalid choice: new-access. (choose from ObjectRead, ObjectWrite, ObjectReadWrite, AnyObjectWrite, AnyObjectRead, AnyObjectReadWrite)" in result.output
472+
assert "Error: Invalid value for '--access-type': invalid choice: new-access." in result.output
473473

474474
def test_preauth_request_delete(self):
475475
result = util.invoke_command(['os', 'preauth-request', 'delete'])

services/rover/src/oci_cli_rover/rover_utils.py

Lines changed: 65 additions & 63 deletions
Original file line numberDiff line numberDiff line change
@@ -96,6 +96,71 @@ def modify_image_workload_name(image_workload_name):
9696
return image_workload_name
9797

9898

99+
def get_object_storage_helper(ctx):
100+
# Override the region for r1 for os public endpoint
101+
if 'config' in ctx.obj:
102+
if 'region' in ctx.obj['config'] and ctx.obj['config']['region']: # region present in ~/.oci/config
103+
if ctx.obj['config']['region'].strip().split(".")[0] == "r1":
104+
region = ctx.obj['config']['region']
105+
ctx.obj['region'] = "r1.oracleiaas.com"
106+
object_cli = cli_util.build_client('object_storage', 'object_storage', ctx)
107+
ctx.obj['region'] = region
108+
return object_cli
109+
return cli_util.build_client('object_storage', 'object_storage', ctx)
110+
111+
112+
def validate_get_image(ctx, **kwargs):
113+
if 'image_id' in kwargs and not kwargs['image_id']:
114+
raise click.UsageError('Parameter image-id cannot be whitespace or empty string')
115+
image_id = kwargs['image_id']
116+
try:
117+
compute_image_obj = get_compute_image_helper(ctx, image_id)
118+
if compute_image_obj is None or compute_image_obj.data is None:
119+
raise click.UsageError("Image not authorized or not found")
120+
except Exception as e:
121+
raise click.UsageError("Image not authorized or not found")
122+
return compute_image_obj
123+
124+
125+
def validate_bucket(compartment_id, ctx, **kwargs):
126+
if kwargs['type'].lower() == "bucket":
127+
if not ('bucket_name' in kwargs and kwargs['bucket_name']):
128+
raise click.UsageError('Parameter bucket-name cannot be whitespace or empty string')
129+
try:
130+
object_storage_obj = get_object_storage_helper(ctx)
131+
kwargs_os = {
132+
'compartment_id': compartment_id}
133+
namespace = object_storage_obj.get_namespace(**kwargs_os).data
134+
result = object_storage_obj.get_bucket(
135+
namespace_name=namespace,
136+
bucket_name=kwargs['bucket_name']
137+
)
138+
if result is None or result.data is None:
139+
raise click.UsageError("Bucket not authorized or not found")
140+
except Exception as e:
141+
raise click.UsageError("Bucket not authorized or not found")
142+
return result
143+
144+
145+
def prepare_bucket_workload_data(result_bucket, **kwargs):
146+
workload_data = [{
147+
"workloadType": "BUCKET", "id": result_bucket.data.name, "name": kwargs['bucket_name'],
148+
"compartmentId": result_bucket.data.compartment_id,
149+
'prefix': kwargs['prefix'], 'rangeStart': kwargs['range_start'], 'rangeEnd': kwargs['range_end']
150+
}]
151+
return workload_data
152+
153+
154+
def prepare_image_workload_data(compute_image_obj, image_id):
155+
compute_image_obj_name = modify_image_workload_name(compute_image_obj.data.display_name)
156+
workload_data = [
157+
{'workloadType': "IMAGE", 'id': image_id, 'name': compute_image_obj_name,
158+
'size': compute_image_obj.data.size_in_mbs, 'compartmentId': compute_image_obj.data.compartment_id,
159+
}
160+
]
161+
return workload_data
162+
163+
99164
def create_master_key_policy_rover_resource(resource_name, ctx, **kwargs):
100165
confirm_prompt_policy = "You are providing your own master key ID, please create a policy to allow " \
101166
"Roving Edge Infrastructure to create DEKs and use the master key ID to encrypt " \
@@ -201,66 +266,3 @@ def remove_additional_params_after_policy(**kwargs):
201266
if 'policy_compartment_id' in kwargs:
202267
kwargs.pop('policy_compartment_id')
203268
return kwargs
204-
205-
206-
def get_object_storage_helper(ctx):
207-
# Override the region for r1 for os public endpoint
208-
if 'config' in ctx.obj:
209-
if 'region' in ctx.obj['config'] and ctx.obj['config']['region']: # region present in ~/.oci/config
210-
if ctx.obj['config']['region'].strip().split(".")[0] == "r1":
211-
region = ctx.obj['config']['region']
212-
ctx.obj['region'] = "r1.oracleiaas.com"
213-
object_cli = cli_util.build_client('object_storage', 'object_storage', ctx)
214-
ctx.obj['region'] = region
215-
return object_cli
216-
return cli_util.build_client('object_storage', 'object_storage', ctx)
217-
218-
219-
def validate_get_image(ctx, **kwargs):
220-
if 'image_id' in kwargs and not kwargs['image_id']:
221-
raise click.UsageError('Parameter image-id cannot be whitespace or empty string')
222-
image_id = kwargs['image_id']
223-
try:
224-
compute_image_obj = get_compute_image_helper(ctx, image_id)
225-
if compute_image_obj is None or compute_image_obj.data is None:
226-
raise click.UsageError("Image not authorized or not found")
227-
except Exception as e:
228-
raise click.UsageError("Image not authorized or not found")
229-
return compute_image_obj
230-
231-
232-
def validate_bucket(ctx, **kwargs):
233-
if kwargs['type'].lower() == "bucket":
234-
if not ('bucket_name' in kwargs and kwargs['bucket_name']):
235-
raise click.UsageError('Parameter bucket-name cannot be whitespace or empty string')
236-
try:
237-
object_storage_obj = get_object_storage_helper(ctx)
238-
namespace = object_storage_obj.get_namespace().data
239-
result = object_storage_obj.get_bucket(
240-
namespace_name=namespace,
241-
bucket_name=kwargs['bucket_name']
242-
)
243-
if result is None or result.data is None:
244-
raise click.UsageError("Bucket not authorized or not found")
245-
except Exception as e:
246-
raise click.UsageError("Bucket not authorized or not found")
247-
return result
248-
249-
250-
def prepare_bucket_workload_data(result_bucket, **kwargs):
251-
workload_data = [{
252-
"workloadType": "BUCKET", "id": result_bucket.data.name, "name": kwargs['bucket_name'],
253-
"compartmentId": result_bucket.data.compartment_id,
254-
'prefix': kwargs['prefix'], 'rangeStart': kwargs['range_start'], 'rangeEnd': kwargs['range_end']
255-
}]
256-
return workload_data
257-
258-
259-
def prepare_image_workload_data(compute_image_obj, image_id):
260-
compute_image_obj_name = modify_image_workload_name(compute_image_obj.data.display_name)
261-
workload_data = [
262-
{'workloadType': "IMAGE", 'id': image_id, 'name': compute_image_obj_name,
263-
'size': compute_image_obj.data.size_in_mbs, 'compartmentId': compute_image_obj.data.compartment_id,
264-
}
265-
]
266-
return workload_data

services/rover/src/oci_cli_rover_cluster/rovercluster_utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -174,7 +174,7 @@ def add_workload_common(ctx, **kwargs):
174174
result = get_rover_cluster_helper(ctx, kwargs['cluster_id'])
175175

176176
if kwargs['type'].lower() == "bucket":
177-
result_bucket = validate_bucket(ctx, **kwargs)
177+
result_bucket = validate_bucket(result.data.compartment_id, ctx, **kwargs)
178178

179179
workload_id = result_bucket.data.name
180180
workload_data = prepare_bucket_workload_data(result_bucket, **kwargs)

0 commit comments

Comments (0)