Skip to content

Commit 6cbc593

Browse files
williexu authored and troydai committed
[Storage] Get sas-token from container url and cleanup code (#6996)
1 parent 5afb8bd commit 6cbc593

File tree

6 files changed

+102
-70
lines changed

6 files changed

+102
-70
lines changed

src/command_modules/azure-cli-storage/HISTORY.rst

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@ Release History
99
* Make '--resource-group' parameter optional for 'storage account' commands.
1010
* Remove 'Failed precondition' warnings for individual failures in batch commands for single aggregated message.
1111
* blob/file delete-batch commands no longer output array of nulls.
12+
* blob download/upload/delete-batch commands will read sas-token from container url
1213

1314
2.1.1
1415
+++++

src/command_modules/azure-cli-storage/azure/cli/command_modules/storage/_params.py

Lines changed: 4 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -264,13 +264,12 @@ def load_arguments(self, _): # pylint: disable=too-many-locals, too-many-statem
264264

265265
with self.argument_context('storage blob upload-batch') as c:
266266
from .sdkutil import get_blob_types
267-
from ._validators import process_blob_upload_batch_parameters
268267

269268
t_blob_content_settings = self.get_sdk('blob.models#ContentSettings')
270269
c.register_content_settings_argument(t_blob_content_settings, update=False, arg_group='Content Control')
271270
c.ignore('source_files', 'destination_container_name')
272271

273-
c.argument('source', options_list=('--source', '-s'), validator=process_blob_upload_batch_parameters)
272+
c.argument('source', options_list=('--source', '-s'))
274273
c.argument('destination', options_list=('--destination', '-d'))
275274
c.argument('max_connections', type=int,
276275
help='Maximum number of parallel connections to use when the blob size exceeds 64MB.')
@@ -290,11 +289,9 @@ def load_arguments(self, _): # pylint: disable=too-many-locals, too-many-statem
290289
c.extra('socket_timeout', socket_timeout_type)
291290

292291
with self.argument_context('storage blob download-batch') as c:
293-
from azure.cli.command_modules.storage._validators import process_blob_download_batch_parameters
294-
295292
c.ignore('source_container_name')
296293
c.argument('destination', options_list=('--destination', '-d'))
297-
c.argument('source', options_list=('--source', '-s'), validator=process_blob_download_batch_parameters)
294+
c.argument('source', options_list=('--source', '-s'))
298295
c.extra('no_progress', progress_type)
299296
c.extra('socket_timeout', socket_timeout_type)
300297
c.argument('max_connections', type=int,
@@ -305,10 +302,8 @@ def load_arguments(self, _): # pylint: disable=too-many-locals, too-many-statem
305302
c.argument('delete_snapshots', arg_type=get_enum_type(get_delete_blob_snapshot_type_names()))
306303

307304
with self.argument_context('storage blob delete-batch') as c:
308-
from azure.cli.command_modules.storage._validators import process_blob_batch_source_parameters
309-
310305
c.ignore('source_container_name')
311-
c.argument('source', options_list=('--source', '-s'), validator=process_blob_batch_source_parameters)
306+
c.argument('source', options_list=('--source', '-s'))
312307
c.argument('delete_snapshots', arg_type=get_enum_type(get_delete_blob_snapshot_type_names()),
313308
help='Required if the blob has associated snapshots.')
314309
c.argument('lease_id', help='Required if the blob has an active lease.')
@@ -335,8 +330,7 @@ def load_arguments(self, _): # pylint: disable=too-many-locals, too-many-statem
335330
c.register_source_uri_arguments(validator=validate_source_uri)
336331

337332
with self.argument_context('storage blob copy start-batch', arg_group='Copy Source') as c:
338-
from azure.cli.command_modules.storage._validators import (get_source_file_or_blob_service_client,
339-
process_blob_copy_batch_namespace)
333+
from azure.cli.command_modules.storage._validators import get_source_file_or_blob_service_client
340334

341335
c.argument('source_client', ignore_type, validator=get_source_file_or_blob_service_client)
342336

@@ -346,7 +340,6 @@ def load_arguments(self, _): # pylint: disable=too-many-locals, too-many-statem
346340
c.argument('source_sas')
347341
c.argument('source_container')
348342
c.argument('source_share')
349-
c.argument('prefix', validator=process_blob_copy_batch_namespace)
350343

351344
with self.argument_context('storage blob incremental-copy start') as c:
352345
from azure.cli.command_modules.storage._validators import process_blob_source_uri

src/command_modules/azure-cli-storage/azure/cli/command_modules/storage/_validators.py

Lines changed: 48 additions & 53 deletions
Original file line numberDiff line numberDiff line change
@@ -676,7 +676,7 @@ def _update_progress(current, total):
676676
del namespace.no_progress
677677

678678

679-
def process_blob_download_batch_parameters(namespace, cmd):
679+
def process_blob_download_batch_parameters(cmd, namespace):
680680
"""Process the parameters for storage blob download command"""
681681
import os
682682

@@ -685,65 +685,22 @@ def process_blob_download_batch_parameters(namespace, cmd):
685685
raise ValueError('incorrect usage: destination must be an existing directory')
686686

687687
# 2. try to extract account name and container name from source string
688-
process_blob_batch_source_parameters(cmd, namespace)
688+
_process_blob_batch_container_parameters(cmd, namespace)
689689

690-
691-
def process_blob_batch_source_parameters(cmd, namespace):
692-
"""Process the parameters for storage blob download command"""
693-
694-
# try to extract account name and container name from source string
695-
from .storage_url_helpers import StorageResourceIdentifier
696-
identifier = StorageResourceIdentifier(cmd.cli_ctx.cloud, namespace.source)
697-
698-
if not identifier.is_url():
699-
namespace.source_container_name = namespace.source
700-
elif identifier.blob:
701-
raise ValueError('incorrect usage: source should be either container URL or name')
702-
else:
703-
namespace.source_container_name = identifier.container
704-
if namespace.account_name is None:
705-
namespace.account_name = identifier.account_name
690+
# 3. Call validators
691+
add_progress_callback(cmd, namespace)
706692

707693

708694
def process_blob_upload_batch_parameters(cmd, namespace):
709695
"""Process the source and destination of storage blob upload command"""
710696
import os
711697

712698
# 1. quick check
713-
if not os.path.exists(namespace.source):
714-
raise ValueError('incorrect usage: source {} does not exist'.format(namespace.source))
715-
716-
if not os.path.isdir(namespace.source):
717-
raise ValueError('incorrect usage: source must be a directory')
699+
if not os.path.exists(namespace.source) or not os.path.isdir(namespace.source):
700+
raise ValueError('incorrect usage: source must be an existing directory')
718701

719702
# 2. try to extract account name and container name from destination string
720-
from .storage_url_helpers import StorageResourceIdentifier
721-
identifier = StorageResourceIdentifier(cmd.cli_ctx.cloud, namespace.destination)
722-
723-
if not identifier.is_url():
724-
namespace.destination_container_name = namespace.destination
725-
elif identifier.blob is not None:
726-
raise ValueError('incorrect usage: destination cannot be a blob url')
727-
else:
728-
namespace.destination_container_name = identifier.container
729-
730-
if namespace.account_name:
731-
if namespace.account_name != identifier.account_name:
732-
raise ValueError(
733-
'The given storage account name is not consistent with the account name in the destination URI')
734-
else:
735-
namespace.account_name = identifier.account_name
736-
737-
if not (namespace.account_key or namespace.sas_token or namespace.connection_string):
738-
validate_client_parameters(cmd, namespace)
739-
740-
# it is possible the account name be overwritten by the connection string
741-
if namespace.account_name != identifier.account_name:
742-
raise ValueError(
743-
'The given storage account name is not consistent with the account name in the destination URI')
744-
745-
if not (namespace.account_key or namespace.sas_token or namespace.connection_string):
746-
raise ValueError('Missing storage account credential information.')
703+
_process_blob_batch_container_parameters(cmd, namespace, source=False)
747704

748705
# 3. collect the files to be uploaded
749706
namespace.source = os.path.realpath(namespace.source)
@@ -765,10 +722,48 @@ def process_blob_upload_batch_parameters(cmd, namespace):
765722
else:
766723
namespace.blob_type = 'block'
767724

725+
# 5. call other validators
726+
validate_metadata(namespace)
727+
t_blob_content_settings = cmd.loader.get_sdk('blob.models#ContentSettings')
728+
get_content_setting_validator(t_blob_content_settings, update=False)(cmd, namespace)
729+
add_progress_callback(cmd, namespace)
730+
731+
732+
def process_blob_delete_batch_parameters(cmd, namespace):
733+
_process_blob_batch_container_parameters(cmd, namespace)
734+
735+
736+
def _process_blob_batch_container_parameters(cmd, namespace, source=True):
737+
"""Process the container parameters for storage blob batch commands before populating args from environment."""
738+
if source:
739+
container_arg, container_name_arg = 'source', 'source_container_name'
740+
else:
741+
# destination
742+
container_arg, container_name_arg = 'destination', 'destination_container_name'
743+
744+
# try to extract account name and container name from source string
745+
from .storage_url_helpers import StorageResourceIdentifier
746+
container_arg_val = getattr(namespace, container_arg) # either a url or name
747+
identifier = StorageResourceIdentifier(cmd.cli_ctx.cloud, container_arg_val)
748+
749+
if not identifier.is_url():
750+
setattr(namespace, container_name_arg, container_arg_val)
751+
elif identifier.blob:
752+
raise ValueError('incorrect usage: {} should be either a container URL or name'.format(container_arg))
753+
else:
754+
setattr(namespace, container_name_arg, identifier.container)
755+
if namespace.account_name is None:
756+
namespace.account_name = identifier.account_name
757+
elif namespace.account_name != identifier.account_name:
758+
raise ValueError('The given storage account name is not consistent with the '
759+
'account name in the destination URL')
760+
761+
# if no sas-token is given and the container url contains one, use it
762+
if not namespace.sas_token and identifier.sas_token:
763+
namespace.sas_token = identifier.sas_token
768764

769-
def process_blob_copy_batch_namespace(namespace):
770-
if namespace.prefix is None and not namespace.recursive:
771-
raise ValueError('incorrect usage: --recursive | --pattern PATTERN')
765+
# Finally, grab missing storage connection parameters from environment variables
766+
validate_client_parameters(cmd, namespace)
772767

773768

774769
def process_file_upload_batch_parameters(cmd, namespace):

src/command_modules/azure-cli-storage/azure/cli/command_modules/storage/commands.py

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -93,6 +93,8 @@ def get_custom_sdk(custom_module, client_factory, resource_type=ResourceType.DAT
9393
from ._format import transform_boolean_for_table, transform_blob_output
9494
from ._transformers import (transform_storage_list_output, transform_url,
9595
create_boolean_result_output_transformer)
96+
from ._validators import (process_blob_download_batch_parameters, process_blob_delete_batch_parameters,
97+
process_blob_upload_batch_parameters)
9698

9799
g.storage_command('list', 'list_blobs', transform=transform_storage_list_output,
98100
table_transformer=transform_blob_output)
@@ -110,9 +112,12 @@ def get_custom_sdk(custom_module, client_factory, resource_type=ResourceType.DAT
110112
g.storage_custom_command('set-tier', 'set_blob_tier')
111113
g.storage_custom_command('upload', 'upload_blob',
112114
doc_string_source='blob#BlockBlobService.create_blob_from_path')
113-
g.storage_custom_command('upload-batch', 'storage_blob_upload_batch')
114-
g.storage_custom_command('download-batch', 'storage_blob_download_batch')
115-
g.storage_custom_command('delete-batch', 'storage_blob_delete_batch')
115+
g.storage_custom_command('upload-batch', 'storage_blob_upload_batch',
116+
validator=process_blob_upload_batch_parameters)
117+
g.storage_custom_command('download-batch', 'storage_blob_download_batch',
118+
validator=process_blob_download_batch_parameters)
119+
g.storage_custom_command('delete-batch', 'storage_blob_delete_batch',
120+
validator=process_blob_delete_batch_parameters)
116121
g.storage_custom_command('show', 'show_blob', table_transformer=transform_blob_output,
117122
client_factory=page_blob_service_factory,
118123
doc_string_source='blob#PageBlobService.get_blob_properties',

src/command_modules/azure-cli-storage/azure/cli/command_modules/storage/operations/blob.py

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -201,7 +201,8 @@ def _upload_blob(*args, **kwargs):
201201
results.append(_create_return_result(dst, guessed_content_settings, result))
202202

203203
num_failures = len(source_files) - len(results)
204-
logger.warning('%s of %s files not uploaded due to "Failed Precondition"', num_failures, len(source_files))
204+
if num_failures:
205+
logger.warning('%s of %s files not uploaded due to "Failed Precondition"', num_failures, len(source_files))
205206
return results
206207

207208

@@ -322,10 +323,10 @@ def _delete_blob(blob_name):
322323
}
323324
return client.delete_blob(**delete_blob_args)
324325

326+
logger = get_logger(__name__)
325327
source_blobs = list(collect_blobs(client, source_container_name, pattern))
326328

327329
if dryrun:
328-
logger = get_logger(__name__)
329330
logger.warning('delete action: from %s', source)
330331
logger.warning(' pattern %s', pattern)
331332
logger.warning(' container %s', source_container_name)
@@ -337,7 +338,8 @@ def _delete_blob(blob_name):
337338

338339
results = [result for include, result in (_delete_blob(blob) for blob in source_blobs) if include]
339340
num_failures = len(source_blobs) - len(results)
340-
logger.warning('%s of %s blobs not deleted due to "Failed Precondition"', num_failures, len(source_blobs))
341+
if num_failures:
342+
logger.warning('%s of %s blobs not deleted due to "Failed Precondition"', num_failures, len(source_blobs))
341343

342344

343345
def _copy_blob_to_blob_container(blob_service, source_blob_service, destination_container, destination_path,

src/command_modules/azure-cli-storage/azure/cli/command_modules/storage/tests/latest/test_storage_batch_operations.py

Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -421,6 +421,42 @@ def create_and_populate_share():
421421
self.storage_cmd('storage file delete-batch -s {} --pattern nonexists/*', storage_account_info,
422422
src_share).assert_with_checks(JMESPathCheck('length(@)', 0))
423423

424+
@ResourceGroupPreparer()
425+
@StorageAccountPreparer()
426+
@StorageTestFilesPreparer()
427+
def test_storage_blob_batch_sas_scenarios(self, test_dir, storage_account_info):
428+
from datetime import datetime, timedelta
429+
430+
container_name = self.create_container(storage_account_info)
431+
temp_dir = self.create_temp_dir()
432+
expiry = (datetime.utcnow() + timedelta(hours=1)).strftime('%Y-%m-%dT%H:%MZ')
433+
434+
sas_token = self.storage_cmd('storage container generate-sas -n {} --permissions dwrl '
435+
'--expiry {}', storage_account_info, container_name, expiry).get_output_in_json()
436+
437+
storage_account = self.cmd('storage account show -n {}'.format(storage_account_info[0])).get_output_in_json()
438+
439+
# create container url with sas token
440+
container_url = storage_account.get('primaryEndpoints').get('blob') + container_name
441+
container_url += '?' + sas_token
442+
443+
self.kwargs.update({
444+
'test_dir': test_dir,
445+
'container_url': container_url,
446+
'temp_dir': temp_dir
447+
})
448+
449+
self.cmd('storage blob upload-batch -s "{test_dir}" -d {container_url}')
450+
self.cmd('storage blob download-batch -s {container_url} -d "{temp_dir}"')
451+
452+
self.storage_cmd('storage blob list -c {}', storage_account_info, container_name).assert_with_checks(
453+
JMESPathCheck('length(@)', sum(len(files) for _, __, files in os.walk(test_dir))),
454+
JMESPathCheck('length(@)', sum(len(files) for _, __, files in os.walk(temp_dir))))
455+
456+
self.cmd('storage blob delete-batch -s {container_url}')
457+
self.storage_cmd('storage blob list -c {}', storage_account_info, container_name).assert_with_checks(
458+
JMESPathCheck('length(@)', 0))
459+
424460

425461
if __name__ == '__main__':
426462
import unittest

0 commit comments

Comments (0)