diff --git a/src/databox/HISTORY.rst b/src/databox/HISTORY.rst index c3e5d4838ef..1c139576ba0 100644 --- a/src/databox/HISTORY.rst +++ b/src/databox/HISTORY.rst @@ -3,14 +3,6 @@ Release History =============== -0.1.2 -++++++ -* Migrate to track2 SDK - -0.1.1 -++++++ -* GA databox module. - 0.1.0 ++++++ * Initial release. diff --git a/src/databox/azext_databox/__init__.py b/src/databox/azext_databox/__init__.py index f7ea258697c..d4314b0a3a4 100644 --- a/src/databox/azext_databox/__init__.py +++ b/src/databox/azext_databox/__init__.py @@ -1,32 +1,50 @@ -# -------------------------------------------------------------------------------------------- +# -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- from azure.cli.core import AzCommandsLoader +from azext_databox.generated._help import helps # pylint: disable=unused-import +try: + from azext_databox.manual._help import helps # pylint: disable=reimported +except ImportError: + pass -import azext_databox._help # pylint: disable=unused-import - -class DataBoxCommandsLoader(AzCommandsLoader): +class DataBoxManagementClientCommandsLoader(AzCommandsLoader): def __init__(self, cli_ctx=None): from azure.cli.core.commands import CliCommandType - from azext_databox._client_factory import cf_databox + from azext_databox.generated._client_factory import cf_databox_cl databox_custom = CliCommandType( operations_tmpl='azext_databox.custom#{}', - client_factory=cf_databox) - super(DataBoxCommandsLoader, self).__init__(cli_ctx=cli_ctx, - custom_command_type=databox_custom) + client_factory=cf_databox_cl) + parent = super(DataBoxManagementClientCommandsLoader, self) + parent.__init__(cli_ctx=cli_ctx, custom_command_type=databox_custom) def load_command_table(self, args): - from azext_databox.commands import load_command_table + from azext_databox.generated.commands import load_command_table load_command_table(self, args) + try: + from azext_databox.manual.commands import load_command_table as load_command_table_manual + load_command_table_manual(self, args) + except ImportError: + pass return self.command_table def load_arguments(self, command): - from azext_databox._params import load_arguments + from azext_databox.generated._params import load_arguments load_arguments(self, command) + try: + from azext_databox.manual._params import load_arguments as load_arguments_manual + load_arguments_manual(self, command) + except ImportError: + pass -COMMAND_LOADER_CLS = DataBoxCommandsLoader +COMMAND_LOADER_CLS = DataBoxManagementClientCommandsLoader diff --git a/src/databox/azext_databox/_client_factory.py b/src/databox/azext_databox/_client_factory.py deleted file mode 100644 index 441837babe7..00000000000 --- a/src/databox/azext_databox/_client_factory.py +++ /dev/null @@ -1,14 +0,0 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- - - -def cf_databox(cli_ctx, *_): - from azure.cli.core.commands.client_factory import get_mgmt_service_client - from azext_databox.vendored_sdks.databox import DataBoxManagementClient - return get_mgmt_service_client(cli_ctx, DataBoxManagementClient) - - -def cf_jobs(cli_ctx, *_): - return cf_databox(cli_ctx).jobs diff --git a/src/databox/azext_databox/_help.py b/src/databox/azext_databox/_help.py deleted file mode 100644 index 0c8d9cad8f8..00000000000 --- a/src/databox/azext_databox/_help.py +++ /dev/null @@ -1,101 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- - -# pylint: disable=too-many-lines -# pylint: disable=line-too-long -from knack.help_files import helps # pylint: disable=unused-import - - -helps['databox job'] = """ - type: group - short-summary: Commands to manage databox job. -""" - -helps['databox job create'] = """ - type: command - short-summary: Create a new job with the specified parameters. - examples: - - name: Create a databox job to use both storage account and managed disk as data destination. - text: |- - az databox job create --resource-group "SdkRg4981" --name "SdkJob3971" --location \\ - "westus" --sku "DataBox" --contact-name "Public SDK Test" \\ - --phone "1234567890" --email-list "testing@microsoft.com" \\ - --street-address1 "16 TOWNSEND ST" --street-address2 "Unit 1" --city "San Francisco" \\ - --state-or-province "CA" --country "US" --postal-code "94107" --company-name "Microsoft" \\ - --storage-account sa1 sa2 --staging-storage-account sa \\ - --resource-group-for-managed-disk /subscriptions/sub/resourceGroups/rg - - - name: Create a databoxdisk job to use storage account as data destination. - text: |- - az databox job create --resource-group "SdkRg4981" --name "SdkJob3971" --location \\ - "westus" --sku "DataBoxDisk" --expected-data-size 1 --contact-name "Public SDK Test" \\ - --phone "1234567890" --email-list "testing@microsoft.com" --street-address1 "16 TOWNSEND ST" \\ - --street-address2 "Unit 1" --city "San Francisco" --state-or-province "CA" --country "US" \\ - --postal-code "94107" --company-name "Microsoft" --storage-account sa1 -""" - -helps['databox job update'] = """ - type: command - short-summary: Update an existing job with the specified parameters. - examples: - - name: Update the job "SdkJob3971" with the specified parameters. - text: |- - az databox job update --resource-group "SdkRg4981" --name "SdkJob3971" \\ - --contact-name "Update Job" --phone "1234567890" \\ - --email-list "testing@microsoft.com" \\ - --street-address1 "16 TOWNSEND ST" \\ - --city "San Francisco" --state-or-province "CA" \\ - --country "US" --postal-code "94107" \\ - --company-name "Microsoft" \\ -""" - -helps['databox job delete'] = """ - type: command - short-summary: Delete a job. - examples: - - name: Delete the job "SdkJob3971" in resource group "SdkRg4981". 
- text: |- - az databox job delete --resource-group "SdkRg4981" --name "SdkJob3971" -""" - -helps['databox job show'] = """ - type: command - short-summary: Get information about the specified job. - examples: - - name: Get the information about the job "SdkJob3971". - text: |- - az databox job show --resource-group "SdkRg4981" --name "SdkJob3971" -""" - -helps['databox job list'] = """ - type: command - short-summary: List all the jobs available under the given resource group or the given subscription. - examples: - - name: List all the jobs available under the current subscription. - text: |- - az databox job list - - name: List all the jobs available under the resource group "SdkRg4981". - text: |- - az databox job list --resource-group "SdkRg4981" -""" - -helps['databox job cancel'] = """ - type: command - short-summary: Cancel a job. - examples: - - name: Cancel the job "SdkJob3971" under resource group "SdkRg4981". - text: |- - az databox job cancel --resource-group "SdkRg4981" --name "SdkJob3971" --reason "CancelTest" -""" - -helps['databox job list-credentials'] = """ - type: command - short-summary: List the unencrypted secrets related to the job. - examples: - - name: List the unencrypted secrets related to the job "TJ-636646322037905056". - text: |- - az databox job list-credentials --resource-group "bvttoolrg6" --name "TJ-636646322037905056" -""" diff --git a/src/databox/azext_databox/_params.py b/src/databox/azext_databox/_params.py deleted file mode 100644 index c9d09d4467c..00000000000 --- a/src/databox/azext_databox/_params.py +++ /dev/null @@ -1,75 +0,0 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- -# pylint: disable=line-too-long -# pylint: disable=too-many-lines -# pylint: disable=too-many-statements - -from azure.cli.core.commands.parameters import ( - tags_type, - get_enum_type, - get_location_type -) -from azure.cli.core.commands.validators import get_default_location_from_resource_group -from knack.arguments import CLIArgumentType - - -def load_arguments(self, _): - storage_accounts_type = CLIArgumentType(help='Space-separated list of the destination storage account. It can be the name or resource ID of storage account.', arg_group='Storage Account', nargs='+') - staging_storage_account_type = CLIArgumentType(help='The name or ID of the destination storage account that can be used to copy the vhd for staging.', arg_group='Managed Disk') - resource_group_for_managed_disk_type = CLIArgumentType(help='The name or ID of the destination resource group where the Compute disks should be created.', arg_group='Managed Disk') - job_name_type = CLIArgumentType(options_list=['--name', '-n'], help='The name of the job resource within the specified resource group. 
job names must be between 3 and 24 characters in length and use any alphanumeric and underscore only') - - with self.argument_context('databox job create') as c: - c.argument('job_name', job_name_type) - c.argument('location', arg_type=get_location_type(self.cli_ctx), default=None, - validator=get_default_location_from_resource_group) - c.argument('tags', tags_type) - c.argument('sku', arg_type=get_enum_type(['DataBox', 'DataBoxDisk', 'DataBoxHeavy']), - help='The sku type of DataBox.') - c.argument('expected_data_size', type=int, help='The expected size of the data which needs to be transferred in this job, in terabytes.The maximum usable capacity is up to 35 TB. This is only needed when sku is DataBoxDisk.') - c.argument('contact_name', help='Contact name of the person.', arg_group='Contact Details') - c.argument('phone', help='Phone number of the contact person.', arg_group='Contact Details') - c.argument('mobile', help='Mobile number of the contact person.', arg_group='Contact Details') - c.argument('email_list', help='Space-separated list of Email addresses to be notified about job progress.', arg_group='Contact Details', nargs='+') - c.argument('street_address1', help='Street Address line 1.', arg_group='Shipping Address') - c.argument('street_address2', help='Street Address line 2.', arg_group='Shipping Address') - c.argument('street_address3', help='Street Address line 3.', arg_group='Shipping Address') - c.argument('city', help='Name of the City.', arg_group='Shipping Address') - c.argument('state_or_province', help='Name of the State or Province.', arg_group='Shipping Address') - c.argument('country', help='Name of the Country. Ex: US', arg_group='Shipping Address') - c.argument('postal_code', help='Postal code.', arg_group='Shipping Address') - c.argument('company_name', help='Name of the company.', arg_group='Shipping Address') - c.extra('storage_accounts', arg_type=storage_accounts_type) - c.extra('staging_storage_account', arg_type=staging_storage_account_type) - c.extra('resource_group_for_managed_disk', arg_type=resource_group_for_managed_disk_type) - c.ignore('destination_account_details') - - with self.argument_context('databox job update') as c: - c.argument('job_name', job_name_type) - c.argument('contact_name', help='Contact name of the person.', arg_group='Contact Details') - c.argument('phone', help='Phone number of the contact person.', arg_group='Contact Details') - c.argument('mobile', help='Mobile number of the contact person.', arg_group='Contact Details') - c.argument('email_list', help='List of Email addresses to be notified about job progress.', arg_group='Contact Details', nargs='+') - c.argument('street_address1', help='Street Address line 1.', arg_group='Shipping Address') - c.argument('street_address2', help='Street Address line 2.', arg_group='Shipping Address') - c.argument('street_address3', help='Street Address line 3.', arg_group='Shipping Address') - c.argument('city', help='Name of the City.', arg_group='Shipping Address') - c.argument('state_or_province', help='Name of the State or Province.', arg_group='Shipping Address') - c.argument('country', help='Name of the Country. 
Ex: US', arg_group='Shipping Address') - c.argument('postal_code', help='Postal code.', arg_group='Shipping Address') - c.argument('company_name', help='Name of the company.', arg_group='Shipping Address') - - with self.argument_context('databox job delete') as c: - c.argument('job_name', job_name_type) - - with self.argument_context('databox job show') as c: - c.argument('job_name', job_name_type) - - with self.argument_context('databox job cancel') as c: - c.argument('job_name', job_name_type) - c.argument('reason', help='Reason for cancellation.') - - with self.argument_context('databox job list-credentials') as c: - c.argument('job_name', job_name_type) diff --git a/src/databox/azext_databox/_validators.py b/src/databox/azext_databox/_validators.py deleted file mode 100644 index a4339304141..00000000000 --- a/src/databox/azext_databox/_validators.py +++ /dev/null @@ -1,85 +0,0 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- - -# pylint: disable=line-too-long -from azure.cli.core.commands.client_factory import get_subscription_id -from msrestazure.tools import resource_id - - -def validate_create_input_parameters(cmd, namespace): - _parse_storage_account_details(cmd, namespace) - _parse_managed_disk_details(cmd, namespace) - _validate_expected_data_size_for_databoxdisk(namespace) - _validate_destination_account_details(namespace) - - -def _parse_storage_account_details(cmd, namespace): - """Parse storage account details for destination.""" - from msrestazure.tools import is_valid_resource_id - - if not namespace.destination_account_details: - namespace.destination_account_details = [] - - if namespace.storage_accounts: - for storage_account in namespace.storage_accounts: - if storage_account and not is_valid_resource_id(storage_account): - storage_account = resource_id( - subscription=get_subscription_id(cmd.cli_ctx), - resource_group=namespace.resource_group_name, - namespace='Microsoft.Storage', - type='storageAccounts', - name=storage_account - ) - - if storage_account: - storage_account_details = {'storage_account_id': storage_account, - 'data_destination_type': 'StorageAccount'} - namespace.destination_account_details.append(storage_account_details) - - del namespace.storage_accounts - - -def _parse_managed_disk_details(cmd, namespace): - """Parse managed disk details for destination.""" - from msrestazure.tools import is_valid_resource_id - - if not namespace.destination_account_details: - namespace.destination_account_details = [] - - subscription = get_subscription_id(cmd.cli_ctx) - if namespace.staging_storage_account and not is_valid_resource_id(namespace.staging_storage_account): - namespace.staging_storage_account = resource_id( - subscription=subscription, - resource_group=namespace.resource_group_name, - namespace='Microsoft.Storage', - type='storageAccounts', - name=namespace.staging_storage_account - ) - - if namespace.resource_group_for_managed_disk and not is_valid_resource_id( - namespace.resource_group_for_managed_disk): - namespace.resource_group_for_managed_disk = '/subscriptions/' + subscription + '/resourceGroups/' + namespace.resource_group_for_managed_disk - - if namespace.staging_storage_account and namespace.resource_group_for_managed_disk: - 
managed_disk_details = {'staging_storage_account_id': namespace.staging_storage_account, - 'resource_group_id': namespace.resource_group_for_managed_disk, - 'data_destination_type': 'ManagedDisk'} - namespace.destination_account_details.append(managed_disk_details) - - del namespace.staging_storage_account - del namespace.resource_group_for_managed_disk - - -def _validate_expected_data_size_for_databoxdisk(namespace): - if namespace.sku == 'DataBoxDisk' and not namespace.expected_data_size: - raise ValueError( - "You must provide '--expected-data-size' when the 'sku' is 'DataBoxDisk'.") - - -def _validate_destination_account_details(namespace): - if not namespace.destination_account_details: - raise ValueError( - "You must provide at least one '--storage-account' or the combination of '--staging-storage-account' and " - "'--resource-group-for-managed-disk'") diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/__init__.py b/src/databox/azext_databox/action.py similarity index 60% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/__init__.py rename to src/databox/azext_databox/action.py index ae972ed54f8..d95d53bf711 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/__init__.py +++ b/src/databox/azext_databox/action.py @@ -1,19 +1,17 @@ -# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# # Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. # -------------------------------------------------------------------------- +# pylint: disable=wildcard-import +# pylint: disable=unused-wildcard-import -from ._data_box_management_client import DataBoxManagementClient -from ._version import VERSION - -__version__ = VERSION -__all__ = ['DataBoxManagementClient'] - +from .generated.action import * # noqa: F403 try: - from ._patch import patch_sdk # type: ignore - patch_sdk() + from .manual.action import * # noqa: F403 except ImportError: pass diff --git a/src/databox/azext_databox/azext_metadata.json b/src/databox/azext_databox/azext_metadata.json index 587a1ed232f..cfc30c747c7 100644 --- a/src/databox/azext_databox/azext_metadata.json +++ b/src/databox/azext_databox/azext_metadata.json @@ -1,3 +1,4 @@ { - "azext.minCliCoreVersion": "2.3.1" + "azext.isExperimental": true, + "azext.minCliCoreVersion": "2.15.0" } \ No newline at end of file diff --git a/src/databox/azext_databox/commands.py b/src/databox/azext_databox/commands.py deleted file mode 100644 index a175550e86d..00000000000 --- a/src/databox/azext_databox/commands.py +++ /dev/null @@ -1,27 +0,0 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. 
-# -------------------------------------------------------------------------------------------- - -# pylint: disable=line-too-long -# pylint: disable=too-many-lines -# pylint: disable=too-many-statements -# pylint: disable=too-many-locals -from azext_databox._validators import validate_create_input_parameters -from azure.cli.core.commands import CliCommandType - - -def load_command_table(self, _): - - from azext_databox._client_factory import cf_jobs - databox_jobs = CliCommandType( - operations_tmpl='azext_databox.vendored_sdks.databox.operations._jobs_operations#JobsOperations.{}', - client_factory=cf_jobs) - with self.command_group('databox job', databox_jobs, client_factory=cf_jobs) as g: - g.custom_command('create', 'create_databox_job', validator=validate_create_input_parameters) - g.custom_command('update', 'update_databox_job') - g.custom_command('delete', 'delete_databox_job', confirmation=True) - g.custom_show_command('show', 'get_databox_job') - g.custom_command('list', 'list_databox_job') - g.custom_command('cancel', 'cancel_databox_job', confirmation=True) - g.custom_command('list-credentials', 'list_credentials_databox_job') diff --git a/src/databox/azext_databox/custom.py b/src/databox/azext_databox/custom.py index 6c84c133626..dbe9d5f9742 100644 --- a/src/databox/azext_databox/custom.py +++ b/src/databox/azext_databox/custom.py @@ -1,135 +1,17 @@ -# -------------------------------------------------------------------------------------------- +# -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- -# pylint: disable=line-too-long -# pylint: disable=too-many-statements -# pylint: disable=too-many-lines -# pylint: disable=too-many-locals -# pylint: disable=unused-argument -# pylint: disable=too-many-branches - - -def create_databox_job(client, - resource_group_name, - job_name, - location, - sku, - contact_name, - phone, - city, - email_list, - street_address1, - postal_code, - country, - state_or_province, - destination_account_details, - expected_data_size=None, - tags=None, - mobile=None, - street_address2=None, - street_address3=None, - company_name=None,): - body = {} - body['location'] = location # str - body['tags'] = tags # dictionary - body.setdefault('sku', {})['name'] = sku # str - body.setdefault('details', {})['job_details_type'] = sku - body.setdefault('details', {})['expected_data_size_in_terabytes'] = expected_data_size - body.setdefault('details', {}).setdefault('contact_details', {})['contact_name'] = contact_name # str - body.setdefault('details', {}).setdefault('contact_details', {})['phone'] = phone # str - body.setdefault('details', {}).setdefault('contact_details', {})['mobile'] = mobile # str - body.setdefault('details', {}).setdefault('contact_details', {})['email_list'] = email_list - body.setdefault('details', {}).setdefault('shipping_address', {})['street_address1'] = street_address1 # str - body.setdefault('details', {}).setdefault('shipping_address', {})['street_address2'] = street_address2 # str - body.setdefault('details', {}).setdefault('shipping_address', {})['street_address3'] = street_address3 # str - body.setdefault('details', {}).setdefault('shipping_address', {})['city'] = city # str - body.setdefault('details', {}).setdefault('shipping_address', {})['state_or_province'] 
= state_or_province # str - body.setdefault('details', {}).setdefault('shipping_address', {})['country'] = country # str - body.setdefault('details', {}).setdefault('shipping_address', {})['postal_code'] = postal_code # str - body.setdefault('details', {}).setdefault('shipping_address', {})['company_name'] = company_name # str - - body.setdefault('details', {})['destination_account_details'] = destination_account_details - - return client.begin_create(resource_group_name=resource_group_name, job_name=job_name, job_resource=body) - - -def update_databox_job(client, - resource_group_name, - job_name, - contact_name=None, - phone=None, - email_list=None, - street_address1=None, - postal_code=None, - country=None, - mobile=None, - city=None, - street_address2=None, - street_address3=None, - state_or_province=None, - company_name=None): - job_resource = get_databox_job(client, resource_group_name, job_name) - job_details = job_resource.details - contact_details = job_details.contact_details - shipping_address = job_details.shipping_address - - body = {} - body.setdefault('details', {}).setdefault('contact_details', {})[ - 'contact_name'] = contact_details.contact_name if contact_name is None else contact_name # str - body.setdefault('details', {}).setdefault('contact_details', {})[ - 'phone'] = contact_details.phone if phone is None else phone # str - body.setdefault('details', {}).setdefault('contact_details', {})[ - 'mobile'] = contact_details.mobile if mobile is None else mobile # str - body.setdefault('details', {}).setdefault('contact_details', {})[ - 'email_list'] = contact_details.email_list if email_list is None else email_list - body.setdefault('details', {}).setdefault('shipping_address', {})[ - 'street_address1'] = shipping_address.street_address1 if street_address1 is None else street_address1 # str - body.setdefault('details', {}).setdefault('shipping_address', {})[ - 'street_address2'] = shipping_address.street_address2 if street_address2 is None else street_address2 # str - body.setdefault('details', {}).setdefault('shipping_address', {})[ - 'street_address3'] = shipping_address.street_address3 if street_address3 is None else street_address3 # str - body.setdefault('details', {}).setdefault('shipping_address', {})[ - 'city'] = shipping_address.city if city is None else city # str - body.setdefault('details', {}).setdefault('shipping_address', {})[ - 'state_or_province'] = shipping_address.state_or_province if state_or_province is None else state_or_province # str - body.setdefault('details', {}).setdefault('shipping_address', {})[ - 'country'] = shipping_address.country if country is None else country # str - body.setdefault('details', {}).setdefault('shipping_address', {})[ - 'postal_code'] = shipping_address.postal_code if postal_code is None else postal_code # str - body.setdefault('details', {}).setdefault('shipping_address', {})[ - 'company_name'] = shipping_address.company_name if company_name is None else company_name # str - - return client.begin_update(resource_group_name=resource_group_name, job_name=job_name, job_resource_update_parameter=body) - - -def delete_databox_job(client, - resource_group_name, - job_name): - return client.begin_delete(resource_group_name=resource_group_name, job_name=job_name) - - -def get_databox_job(client, - resource_group_name, - job_name): - return client.get(resource_group_name=resource_group_name, job_name=job_name, expand='details') - - -def list_databox_job(client, - resource_group_name=None): - if resource_group_name is not None: - 
return client.list_by_resource_group(resource_group_name=resource_group_name) - return client.list() - - -def cancel_databox_job(client, - resource_group_name, - job_name, - reason): - return client.cancel(resource_group_name=resource_group_name, job_name=job_name, cancellation_reason={'reason': reason}) - - -def list_credentials_databox_job(client, - resource_group_name, - job_name): - return client.list_credentials(resource_group_name=resource_group_name, job_name=job_name) +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wildcard-import +# pylint: disable=unused-wildcard-import + +from .generated.custom import * # noqa: F403 +try: + from .manual.custom import * # noqa: F403 +except ImportError: + pass diff --git a/src/databox/azext_databox/vendored_sdks/databox/_version.py b/src/databox/azext_databox/generated/__init__.py similarity index 77% rename from src/databox/azext_databox/vendored_sdks/databox/_version.py rename to src/databox/azext_databox/generated/__init__.py index c47f66669f1..c9cfdc73e77 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/_version.py +++ b/src/databox/azext_databox/generated/__init__.py @@ -1,9 +1,12 @@ # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# # Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. # -------------------------------------------------------------------------- -VERSION = "1.0.0" +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/src/databox/azext_databox/generated/_client_factory.py b/src/databox/azext_databox/generated/_client_factory.py new file mode 100644 index 00000000000..58b4176b5b9 --- /dev/null +++ b/src/databox/azext_databox/generated/_client_factory.py @@ -0,0 +1,24 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + + +def cf_databox_cl(cli_ctx, *_): + from azure.cli.core.commands.client_factory import get_mgmt_service_client + from azext_databox.vendored_sdks.databox import DataBoxManagementClient + return get_mgmt_service_client(cli_ctx, + DataBoxManagementClient) + + +def cf_job(cli_ctx, *_): + return cf_databox_cl(cli_ctx).jobs + + +def cf_service(cli_ctx, *_): + return cf_databox_cl(cli_ctx).service diff --git a/src/databox/azext_databox/generated/_help.py b/src/databox/azext_databox/generated/_help.py new file mode 100644 index 00000000000..5df4e9e5879 --- /dev/null +++ b/src/databox/azext_databox/generated/_help.py @@ -0,0 +1,436 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=too-many-lines + +from knack.help_files import helps + + +helps['databox job'] = """ + type: group + short-summary: Manage job with databox +""" + +helps['databox job list'] = """ + type: command + short-summary: "Lists all the jobs available under the given resource group. And Lists all the jobs available \ +under the subscription." + examples: + - name: JobsListByResourceGroup + text: |- + az databox job list --resource-group "SdkRg5154" + - name: JobsList + text: |- + az databox job list +""" + +helps['databox job show'] = """ + type: command + short-summary: "Gets information about the specified job." + examples: + - name: JobsGet + text: |- + az databox job show --expand "details" --name "SdkJob952" --resource-group "SdkRg5154" + - name: JobsGetCmk + text: |- + az databox job show --expand "details" --name "SdkJob1735" --resource-group "SdkRg7937" + - name: JobsGetCopyStuck + text: |- + az databox job show --expand "details" --name "TJx-637505258985313014" --resource-group \ +"dmstestresource" + - name: JobsGetExport + text: |- + az databox job show --expand "details" --name "SdkJob6429" --resource-group "SdkRg8091" +""" + +helps['databox job create'] = """ + type: command + short-summary: "Creates a new job with the specified parameters. Existing job cannot be updated with this API and \ +should instead be updated with the Update job API." + parameters: + - name: --sku + short-summary: "The sku type." + long-summary: | + Usage: --sku name=XX display-name=XX family=XX + + name: Required. The sku name. + display-name: The display name of the sku. + family: The sku family. 
+ examples: + - name: JobsCreate + text: |- + az databox job create --name "SdkJob952" --location "westus" --transfer-type "ImportToAzure" --details \ +"{\\"contactDetails\\":{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@microsoft.com\\"],\\"phone\ +\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"},\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountTyp\ +e\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourcegroups/\ +databoxbvt/providers/Microsoft.Storage/storageAccounts/databoxbvttestaccount\\"}}],\\"jobDetailsType\\":\\"DataBox\\",\ +\\"shippingAddress\\":{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\ +\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 \ +TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}" --sku name="DataBox" --resource-group "SdkRg5154" + - name: JobsCreateDevicePassword + text: |- + az databox job create --name "SdkJob9640" --location "westus" --transfer-type "ImportToAzure" --details \ +"{\\"contactDetails\\":{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@microsoft.com\\"],\\"phone\ +\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"},\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountTyp\ +e\\":\\"StorageAccount\\",\\"sharePassword\\":\\"\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8\ +ff7-4a25-95c7-ce9da541242f/resourceGroups/databoxbvt1/providers/Microsoft.Storage/storageAccounts/databoxbvttestaccount\ +2\\"}}],\\"devicePassword\\":\\"\\",\\"jobDetailsType\\":\\"DataBox\\",\\"shippingAddress\\":{\\"addres\ +sType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"po\ +stalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND \ +ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}" --sku name="DataBox" --resource-group "SdkRg7478" + - name: JobsCreateDoubleEncryption + text: |- + az databox job create --name "SdkJob6599" --location "westus" --transfer-type "ImportToAzure" --details \ +"{\\"contactDetails\\":{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@microsoft.com\\"],\\"phone\ +\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"},\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountTyp\ +e\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourcegroups/\ +databoxbvt/providers/Microsoft.Storage/storageAccounts/databoxbvttestaccount\\"}}],\\"jobDetailsType\\":\\"DataBox\\",\ +\\"preferences\\":{\\"encryptionPreferences\\":{\\"doubleEncryption\\":\\"Enabled\\"}},\\"shippingAddress\\":{\\"addres\ +sType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"po\ +stalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND \ +ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}" --sku name="DataBox" --resource-group "SdkRg608" + - name: JobsCreateExport + text: |- + az databox job create --name "SdkJob6429" --location "westus" --transfer-type "ExportFromAzure" \ +--details "{\\"contactDetails\\":{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@microsoft.com\\"]\ +,\\"phone\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"},\\"dataExportDetails\\":[{\\"accountDetails\\":{\\"dataA\ 
+ccountType\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resour\ +ceGroups/akvenkat/providers/Microsoft.Storage/storageAccounts/aaaaaa2\\"},\\"transferConfiguration\\":{\\"transferAllDe\ +tails\\":{\\"include\\":{\\"dataAccountType\\":\\"StorageAccount\\",\\"transferAllBlobs\\":true,\\"transferAllFiles\\":\ +true}},\\"transferConfigurationType\\":\\"TransferAll\\"}}],\\"jobDetailsType\\":\\"DataBox\\",\\"shippingAddress\\":{\ +\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\ +\\",\\"postalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND \ +ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}" --sku name="DataBox" --resource-group "SdkRg8091" + - name: JobsCreateWithUserAssignedIdentity + text: |- + az databox job create --name "SdkJob5337" --type "UserAssigned" --user-assigned-identities \ +"{\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/akvenkat/providers/Microsoft.ManagedIdentity/us\ +erAssignedIdentities/sdkIdentity\\":{}}" --location "westus" --transfer-type "ImportToAzure" --details \ +"{\\"contactDetails\\":{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@microsoft.com\\"],\\"phone\ +\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"},\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountTyp\ +e\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/\ +databoxbvt1/providers/Microsoft.Storage/storageAccounts/databoxbvttestaccount2\\"}}],\\"jobDetailsType\\":\\"DataBox\\"\ +,\\"shippingAddress\\":{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsof\ +t\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 \ +TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}" --sku name="DataBox" --resource-group "SdkRg7552" +""" + +helps['databox job update'] = """ + type: command + short-summary: "Updates the properties of an existing job." + parameters: + - name: --shipping-address + short-summary: "Shipping address of the customer." + long-summary: | + Usage: --shipping-address street-address1=XX street-address2=XX street-address3=XX city=XX \ +state-or-province=XX country=XX postal-code=XX zip-extended-code=XX company-name=XX address-type=XX + + street-address1: Required. Street Address line 1. + street-address2: Street Address line 2. + street-address3: Street Address line 3. + city: Name of the City. + state-or-province: Name of the State or Province. + country: Required. Name of the Country. + postal-code: Postal code. + zip-extended-code: Extended Zip Code. + company-name: Name of the company. + address-type: Type of address. + - name: --user-assigned + short-summary: "User assigned identity properties." + long-summary: | + Usage: --user-assigned resource-id=XX + + resource-id: Arm resource id for user assigned identity to be used to fetch MSI token. + - name: --notification-preference + short-summary: "Notification preference for a job stage." + long-summary: | + Usage: --notification-preference stage-name=XX send-notification=XX + + stage-name: Required. Name of the stage. + send-notification: Required. Notification is required or not. + + Multiple actions can be specified by using more than one --notification-preference argument. 
+ examples: + - name: JobsPatch + text: |- + az databox job update --name "SdkJob952" --contact-name "Update Job" --email-list \ +"testing@microsoft.com" --phone "1234567890" --phone-extension "1234" --shipping-address address-type="Commercial" \ +city="San Francisco" company-name="Microsoft" country="US" postal-code="94107" state-or-province="CA" \ +street-address1="16 TOWNSEND ST" street-address2="Unit 1" --resource-group "SdkRg5154" + - name: JobsPatchCmk + text: |- + az databox job update --name "SdkJob1735" --kek-type "CustomerManaged" --kek-url \ +"https://sdkkeyvault.vault.azure.net/keys/SSDKEY/" --kek-vault-resource-id "/subscriptions/fa68082f-8ff7-4a25-95c7-ce9d\ +a541242f/resourceGroups/akvenkat/providers/Microsoft.KeyVault/vaults/SDKKeyVault" --resource-group "SdkRg7937" + - name: JobsPatchSystemAssignedToUserAssigned + text: |- + az databox job update --name "SdkJob2965" --resource-identity-type "SystemAssigned,UserAssigned" \ +--user-assigned-identities "{\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/akvenkat/providers/M\ +icrosoft.ManagedIdentity/userAssignedIdentities/sdkIdentity\\":{}}" --type "UserAssigned" --user-assigned \ +resource-id="/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/akvenkat/providers/Microsoft.ManagedIde\ +ntity/userAssignedIdentities/sdkIdentity" --kek-type "CustomerManaged" --kek-url "https://sdkkeyvault.vault.azure.net/k\ +eys/SSDKEY/" --kek-vault-resource-id "/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/akvenkat/provi\ +ders/Microsoft.KeyVault/vaults/SDKKeyVault" --resource-group "SdkRg9765" +""" + +helps['databox job delete'] = """ + type: command + short-summary: "Deletes a job." + examples: + - name: JobsDelete + text: |- + az databox job delete --name "SdkJob952" --resource-group "SdkRg5154" +""" + +helps['databox job book-shipment-pick-up'] = """ + type: command + short-summary: "Book shipment pick up." + examples: + - name: BookShipmentPickupPost + text: |- + az databox job book-shipment-pick-up --name "TJ-636646322037905056" --resource-group "bvttoolrg6" \ +--end-time "2019-09-22T18:30:00Z" --shipment-location "Front desk" --start-time "2019-09-20T18:30:00Z" +""" + +helps['databox job cancel'] = """ + type: command + short-summary: "CancelJob." + examples: + - name: JobsCancelPost + text: |- + az databox job cancel --reason "CancelTest" --name "SdkJob952" --resource-group "SdkRg5154" +""" + +helps['databox job list-credentials'] = """ + type: command + short-summary: "This method gets the unencrypted secrets related to the job." + examples: + - name: JobsListCredentials + text: |- + az databox job list-credentials --name "TJ-636646322037905056" --resource-group "bvttoolrg6" +""" + +helps['databox job wait'] = """ + type: command + short-summary: Place the CLI in a waiting state until a condition of the databox job is met. + examples: + - name: Pause executing next line of CLI script until the databox job is successfully created. + text: |- + az databox job wait --expand "details" --name "SdkJob6429" --resource-group "SdkRg8091" --created + - name: Pause executing next line of CLI script until the databox job is successfully updated. + text: |- + az databox job wait --expand "details" --name "SdkJob6429" --resource-group "SdkRg8091" --updated + - name: Pause executing next line of CLI script until the databox job is successfully deleted. 
+ text: |- + az databox job wait --expand "details" --name "SdkJob6429" --resource-group "SdkRg8091" --deleted +""" + +helps['databox'] = """ + type: group + short-summary: Manage with databox +""" + +helps['databox mitigate'] = """ + type: command + short-summary: "Request to mitigate for a given job." + examples: + - name: Mitigate + text: |- + az databox mitigate --job-name "SdkJob8367" --customer-resolution-code "MoveToCleanUpDevice" \ +--resource-group "SdkRg9836" +""" + +helps['databox service'] = """ + type: group + short-summary: Manage service with databox +""" + +helps['databox service list-available-sku-by-resource-group'] = """ + type: command + short-summary: "This method provides the list of available skus for the given subscription, resource group and \ +location." + examples: + - name: AvailableSkusPost + text: |- + az databox service list-available-sku-by-resource-group --country "US" --available-sku-request-location \ +"westus" --transfer-type "ImportToAzure" --location "westus" --resource-group "bvttoolrg6" +""" + +helps['databox service region-configuration'] = """ + type: command + short-summary: "This API provides configuration details specific to given region/location at Subscription level." + parameters: + - name: --data-box-schedule-availability-request + short-summary: "Request body to get the availability for scheduling data box orders orders." + long-summary: | + Usage: --data-box-schedule-availability-request storage-location=XX sku-name=XX country=XX + + storage-location: Required. Location for data transfer. For locations check: \ +https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01 + sku-name: Required. Sku Name for which the order is to be scheduled. + country: Country in which storage location should be supported. + - name: --disk-schedule-availability-request + short-summary: "Request body to get the availability for scheduling disk orders." + long-summary: | + Usage: --disk-schedule-availability-request expected-data-size-in-tera-bytes=XX storage-location=XX \ +sku-name=XX country=XX + + expected-data-size-in-tera-bytes: Required. The expected size of the data, which needs to be transferred \ +in this job, in terabytes. + storage-location: Required. Location for data transfer. For locations check: \ +https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01 + sku-name: Required. Sku Name for which the order is to be scheduled. + country: Country in which storage location should be supported. + - name: --heavy-schedule-availability-request + short-summary: "Request body to get the availability for scheduling heavy orders." + long-summary: | + Usage: --heavy-schedule-availability-request storage-location=XX sku-name=XX country=XX + + storage-location: Required. Location for data transfer. For locations check: \ +https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01 + sku-name: Required. Sku Name for which the order is to be scheduled. + country: Country in which storage location should be supported. + examples: + - name: RegionConfiguration + text: |- + az databox service region-configuration --location "westus" --schedule-availability-request \ +"{\\"skuName\\":\\"DataBox\\",\\"storageLocation\\":\\"westus\\"}" +""" + +helps['databox service region-configuration-by-resource-group'] = """ + type: command + short-summary: "This API provides configuration details specific to given region/location at Resource group \ +level." 
+ parameters: + - name: --data-box-schedule-availability-request + short-summary: "Request body to get the availability for scheduling data box orders orders." + long-summary: | + Usage: --data-box-schedule-availability-request storage-location=XX sku-name=XX country=XX + + storage-location: Required. Location for data transfer. For locations check: \ +https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01 + sku-name: Required. Sku Name for which the order is to be scheduled. + country: Country in which storage location should be supported. + - name: --disk-schedule-availability-request + short-summary: "Request body to get the availability for scheduling disk orders." + long-summary: | + Usage: --disk-schedule-availability-request expected-data-size-in-tera-bytes=XX storage-location=XX \ +sku-name=XX country=XX + + expected-data-size-in-tera-bytes: Required. The expected size of the data, which needs to be transferred \ +in this job, in terabytes. + storage-location: Required. Location for data transfer. For locations check: \ +https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01 + sku-name: Required. Sku Name for which the order is to be scheduled. + country: Country in which storage location should be supported. + - name: --heavy-schedule-availability-request + short-summary: "Request body to get the availability for scheduling heavy orders." + long-summary: | + Usage: --heavy-schedule-availability-request storage-location=XX sku-name=XX country=XX + + storage-location: Required. Location for data transfer. For locations check: \ +https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01 + sku-name: Required. Sku Name for which the order is to be scheduled. + country: Country in which storage location should be supported. + examples: + - name: RegionConfigurationByResourceGroup + text: |- + az databox service region-configuration-by-resource-group --location "westus" \ +--schedule-availability-request "{\\"skuName\\":\\"DataBox\\",\\"storageLocation\\":\\"westus\\"}" --resource-group \ +"SdkRg4981" +""" + +helps['databox service validate-address'] = """ + type: command + short-summary: "[DEPRECATED NOTICE: This operation will soon be removed]. This method validates the customer \ +shipping address and provide alternate addresses if any." + parameters: + - name: --shipping-address + short-summary: "Shipping address of the customer." + long-summary: | + Usage: --shipping-address street-address1=XX street-address2=XX street-address3=XX city=XX \ +state-or-province=XX country=XX postal-code=XX zip-extended-code=XX company-name=XX address-type=XX + + street-address1: Required. Street Address line 1. + street-address2: Street Address line 2. + street-address3: Street Address line 3. + city: Name of the City. + state-or-province: Name of the State or Province. + country: Required. Name of the Country. + postal-code: Postal code. + zip-extended-code: Extended Zip Code. + company-name: Name of the company. + address-type: Type of address. 
+ examples: + - name: ValidateAddressPost + text: |- + az databox service validate-address --location "westus" --device-type "DataBox" --shipping-address \ +address-type="Commercial" city="San Francisco" company-name="Microsoft" country="US" postal-code="94107" \ +state-or-province="CA" street-address1="16 TOWNSEND ST" street-address2="Unit 1" --validation-type "ValidateAddress" +""" + +helps['databox service validate-input'] = """ + type: command + short-summary: "This method does all necessary pre-job creation validation under subscription." + parameters: + - name: --create-job-validations + short-summary: "It does all pre-job creation validations." + long-summary: | + Usage: --create-job-validations individual-request-details=XX + + individual-request-details: Required. List of request details contain validationType and its request as \ +key and value respectively. + examples: + - name: ValidateInputs + text: |- + az databox service validate-input --location "westus" --validation-request \ +"{\\"individualRequestDetails\\":[{\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountType\\":\\"StorageAcco\ +unt\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourcegroups/databoxbvt/provider\ +s/Microsoft.Storage/storageAccounts/databoxbvttestaccount\\"}}],\\"deviceType\\":\\"DataBox\\",\\"transferType\\":\\"Im\ +portToAzure\\",\\"validationType\\":\\"ValidateDataTransferDetails\\"},{\\"deviceType\\":\\"DataBox\\",\\"shippingAddre\ +ss\\":{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\\ +":\\"US\\",\\"postalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND \ +ST\\",\\"streetAddress2\\":\\"Unit 1\\"},\\"transportPreferences\\":{\\"preferredShipmentType\\":\\"MicrosoftManaged\\"\ +},\\"validationType\\":\\"ValidateAddress\\"},{\\"validationType\\":\\"ValidateSubscriptionIsAllowedToCreateJob\\"},{\\\ +"country\\":\\"US\\",\\"deviceType\\":\\"DataBox\\",\\"location\\":\\"westus\\",\\"transferType\\":\\"ImportToAzure\\",\ +\\"validationType\\":\\"ValidateSkuAvailability\\"},{\\"deviceType\\":\\"DataBox\\",\\"validationType\\":\\"ValidateCre\ +ateOrderLimit\\"},{\\"deviceType\\":\\"DataBox\\",\\"preference\\":{\\"transportPreferences\\":{\\"preferredShipmentTyp\ +e\\":\\"MicrosoftManaged\\"}},\\"validationType\\":\\"ValidatePreferences\\"}],\\"validationCategory\\":\\"JobCreationV\ +alidation\\"}" +""" + +helps['databox service validate-input-by-resource-group'] = """ + type: command + short-summary: "This method does all necessary pre-job creation validation under resource group." + parameters: + - name: --create-job-validations + short-summary: "It does all pre-job creation validations." + long-summary: | + Usage: --create-job-validations individual-request-details=XX + + individual-request-details: Required. List of request details contain validationType and its request as \ +key and value respectively. 
+ examples: + - name: ValidateInputsByResourceGroup + text: |- + az databox service validate-input-by-resource-group --location "westus" --resource-group "SdkRg6861" \ +--validation-request "{\\"individualRequestDetails\\":[{\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountT\ +ype\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourcegroup\ +s/databoxbvt/providers/Microsoft.Storage/storageAccounts/databoxbvttestaccount\\"}}],\\"deviceType\\":\\"DataBox\\",\\"\ +transferType\\":\\"ImportToAzure\\",\\"validationType\\":\\"ValidateDataTransferDetails\\"},{\\"deviceType\\":\\"DataBo\ +x\\",\\"shippingAddress\\":{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Micr\ +osoft\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 \ +TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit 1\\"},\\"transportPreferences\\":{\\"preferredShipmentType\\":\\"MicrosoftM\ +anaged\\"},\\"validationType\\":\\"ValidateAddress\\"},{\\"validationType\\":\\"ValidateSubscriptionIsAllowedToCreateJo\ +b\\"},{\\"country\\":\\"US\\",\\"deviceType\\":\\"DataBox\\",\\"location\\":\\"westus\\",\\"transferType\\":\\"ImportTo\ +Azure\\",\\"validationType\\":\\"ValidateSkuAvailability\\"},{\\"deviceType\\":\\"DataBox\\",\\"validationType\\":\\"Va\ +lidateCreateOrderLimit\\"},{\\"deviceType\\":\\"DataBox\\",\\"preference\\":{\\"transportPreferences\\":{\\"preferredSh\ +ipmentType\\":\\"MicrosoftManaged\\"}},\\"validationType\\":\\"ValidatePreferences\\"}],\\"validationCategory\\":\\"Job\ +CreationValidation\\"}" +""" diff --git a/src/databox/azext_databox/generated/_params.py b/src/databox/azext_databox/generated/_params.py new file mode 100644 index 00000000000..026d3460da5 --- /dev/null +++ b/src/databox/azext_databox/generated/_params.py @@ -0,0 +1,216 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=too-many-lines +# pylint: disable=too-many-statements + +from azure.cli.core.commands.parameters import ( + tags_type, + get_enum_type, + resource_group_name_type, + get_location_type +) +from azure.cli.core.commands.validators import ( + get_default_location_from_resource_group, + validate_file_or_dict +) +from azext_databox.action import ( + AddSku, + AddShippingAddress, + AddUserAssigned, + AddNotificationPreference, + AddDataBoxScheduleAvailabilityRequest, + AddDiskScheduleAvailabilityRequest, + AddHeavyScheduleAvailabilityRequest, + AddCreateJobValidations +) + + +def load_arguments(self, _): + + with self.argument_context('databox job list') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('skip_token', type=str, help='$skipToken is supported on Get list of jobs, which provides the next ' + 'page in the list of jobs.') + + with self.argument_context('databox job show') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('job_name', options_list=['--name', '-n', '--job-name'], type=str, help='The name of the job ' + 'Resource within the specified resource group. 
job names must be between 3 and 24 characters in ' + 'length and use any alphanumeric and underscore only', id_part='name') + c.argument('expand', type=str, help='$expand is supported on details parameter for job, which provides details ' + 'on the job stages.') + + with self.argument_context('databox job create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('job_name', options_list=['--name', '-n', '--job-name'], type=str, help='The name of the job ' + 'Resource within the specified resource group. job names must be between 3 and 24 characters in ' + 'length and use any alphanumeric and underscore only') + c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, + validator=get_default_location_from_resource_group) + c.argument('tags', tags_type) + c.argument('sku', action=AddSku, nargs='+', help='The sku type.') + c.argument('type_', options_list=['--type'], type=str, help='Identity type', arg_group='Identity') + c.argument('user_assigned_identities', type=validate_file_or_dict, help='User Assigned Identities Expected ' + 'value: json-string/@json-file.', arg_group='Identity') + c.argument('transfer_type', arg_type=get_enum_type(['ImportToAzure', 'ExportFromAzure']), help='Type of the ' + 'data transfer.') + c.argument('details', type=validate_file_or_dict, help='Details of a job run. This field will only be sent for ' + 'expand details filter. Expected value: json-string/@json-file.') + c.argument('delivery_type', arg_type=get_enum_type(['NonScheduled', 'Scheduled']), + help='Delivery type of Job.') + c.argument('scheduled_date_time', help='Scheduled date time.', arg_group='Delivery Info') + + with self.argument_context('databox job update') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('job_name', options_list=['--name', '-n', '--job-name'], type=str, help='The name of the job ' + 'Resource within the specified resource group. job names must be between 3 and 24 characters in ' + 'length and use any alphanumeric and underscore only', id_part='name') + c.argument('if_match', type=str, help='Defines the If-Match condition. The patch will be performed only if the ' + 'ETag of the job on the server matches this value.') + c.argument('tags', tags_type) + c.argument('shipping_address', action=AddShippingAddress, nargs='+', help='Shipping address of the customer.', + arg_group='Details') + c.argument('kek_type', arg_type=get_enum_type(['MicrosoftManaged', 'CustomerManaged']), help='Type of ' + 'encryption key used for key encryption.', arg_group='Details Key Encryption Key') + c.argument('kek_url', type=str, help='Key encryption key. It is required in case of Customer managed KekType.', + arg_group='Details Key Encryption Key') + c.argument('kek_vault_resource_id', type=str, help='Kek vault resource id. 
It is required in case of Customer ' + 'managed KekType.', arg_group='Details Key Encryption Key') + c.argument('type_', options_list=['--type'], type=str, help='Managed service identity type.', + arg_group='Details Key Encryption Key Identity Properties') + c.argument('user_assigned', action=AddUserAssigned, nargs='+', help='User assigned identity properties.', + arg_group='Details Key Encryption Key Identity Properties') + c.argument('contact_name', type=str, help='Contact name of the person.', arg_group='Details Contact Details') + c.argument('phone', type=str, help='Phone number of the contact person.', arg_group='Details Contact Details') + c.argument('phone_extension', type=str, help='Phone extension number of the contact person.', + arg_group='Details Contact Details') + c.argument('mobile', type=str, help='Mobile number of the contact person.', + arg_group='Details Contact Details') + c.argument('email_list', nargs='+', help='List of Email-ids to be notified about job progress.', + arg_group='Details Contact Details') + c.argument('notification_preference', action=AddNotificationPreference, nargs='+', help='Notification ' + 'preference for a job stage.', arg_group='Details Contact Details') + c.argument('resource_identity_type', type=str, help='Identity type', arg_group='Identity') + c.argument('user_assigned_identities', type=validate_file_or_dict, help='User Assigned Identities Expected ' + 'value: json-string/@json-file.', arg_group='Identity') + + with self.argument_context('databox job delete') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('job_name', options_list=['--name', '-n', '--job-name'], type=str, help='The name of the job ' + 'Resource within the specified resource group. job names must be between 3 and 24 characters in ' + 'length and use any alphanumeric and underscore only', id_part='name') + + with self.argument_context('databox job book-shipment-pick-up') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('job_name', options_list=['--name', '-n', '--job-name'], type=str, help='The name of the job ' + 'Resource within the specified resource group. job names must be between 3 and 24 characters in ' + 'length and use any alphanumeric and underscore only', id_part='name') + c.argument('start_time', help='Minimum date after which the pick up should commence, this must be in local ' + 'time of pick up area.') + c.argument('end_time', help='Maximum date before which the pick up should commence, this must be in local time ' + 'of pick up area.') + c.argument('shipment_location', type=str, help='Shipment Location in the pickup place. Eg.front desk') + + with self.argument_context('databox job cancel') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('job_name', options_list=['--name', '-n', '--job-name'], type=str, help='The name of the job ' + 'Resource within the specified resource group. job names must be between 3 and 24 characters in ' + 'length and use any alphanumeric and underscore only', id_part='name') + c.argument('reason', type=str, help='Reason for cancellation.') + + with self.argument_context('databox job list-credentials') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('job_name', options_list=['--name', '-n', '--job-name'], type=str, help='The name of the job ' + 'Resource within the specified resource group. 
job names must be between 3 and 24 characters in ' + 'length and use any alphanumeric and underscore only') + + with self.argument_context('databox job wait') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('job_name', options_list=['--name', '-n', '--job-name'], type=str, help='The name of the job ' + 'Resource within the specified resource group. job names must be between 3 and 24 characters in ' + 'length and use any alphanumeric and underscore only', id_part='name') + c.argument('expand', type=str, help='$expand is supported on details parameter for job, which provides details ' + 'on the job stages.') + + with self.argument_context('databox mitigate') as c: + c.argument('job_name', type=str, help='The name of the job Resource within the specified resource group. job ' + 'names must be between 3 and 24 characters in length and use any alphanumeric and underscore only', + id_part='name') + c.argument('resource_group_name', resource_group_name_type) + c.argument('customer_resolution_code', arg_type=get_enum_type(['None', 'MoveToCleanUpDevice', 'Resume']), + help='Resolution code for the job') + + with self.argument_context('databox service list-available-sku-by-resource-group') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, + validator=get_default_location_from_resource_group) + c.argument('transfer_type', arg_type=get_enum_type(['ImportToAzure', 'ExportFromAzure']), help='Type of the ' + 'transfer.') + c.argument('country', type=str, help='ISO country code. Country for hardware shipment. For codes check: ' + 'https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements') + c.argument('available_sku_request_location', type=str, help='Location for data transfer. 
For locations check: ' + 'https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01') + c.argument('sku_names', nargs='+', help='Sku Names to filter for available skus') + + with self.argument_context('databox service region-configuration') as c: + c.argument('location', arg_type=get_location_type(self.cli_ctx), id_part='name') + c.argument('data_box_schedule_availability_request', action=AddDataBoxScheduleAvailabilityRequest, nargs='+', + help='Request body to get the availability for scheduling data box orders orders.', + arg_group='ScheduleAvailabilityRequest') + c.argument('disk_schedule_availability_request', action=AddDiskScheduleAvailabilityRequest, nargs='+', + help='Request body to get the availability for scheduling disk orders.', + arg_group='ScheduleAvailabilityRequest') + c.argument('heavy_schedule_availability_request', action=AddHeavyScheduleAvailabilityRequest, nargs='+', + help='Request body to get the availability for scheduling heavy orders.', + arg_group='ScheduleAvailabilityRequest') + c.argument('sku_name', arg_type=get_enum_type(['DataBox', 'DataBoxDisk', 'DataBoxHeavy']), help='Type of the ' + 'device.', arg_group='Transport Availability Request') + + with self.argument_context('databox service region-configuration-by-resource-group') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, + validator=get_default_location_from_resource_group, id_part='name') + c.argument('data_box_schedule_availability_request', action=AddDataBoxScheduleAvailabilityRequest, nargs='+', + help='Request body to get the availability for scheduling data box orders orders.', + arg_group='ScheduleAvailabilityRequest') + c.argument('disk_schedule_availability_request', action=AddDiskScheduleAvailabilityRequest, nargs='+', + help='Request body to get the availability for scheduling disk orders.', + arg_group='ScheduleAvailabilityRequest') + c.argument('heavy_schedule_availability_request', action=AddHeavyScheduleAvailabilityRequest, nargs='+', + help='Request body to get the availability for scheduling heavy orders.', + arg_group='ScheduleAvailabilityRequest') + c.argument('sku_name', arg_type=get_enum_type(['DataBox', 'DataBoxDisk', 'DataBoxHeavy']), help='Type of the ' + 'device.', arg_group='Transport Availability Request') + + with self.argument_context('databox service validate-address') as c: + c.argument('location', arg_type=get_location_type(self.cli_ctx), id_part='name') + c.argument('validation_type', arg_type=get_enum_type(['ValidateAddress', 'ValidateSubscriptionIsAllowedToCreate' + 'Job', 'ValidatePreferences', 'ValidateCreateOrderLimit', + 'ValidateSkuAvailability', + 'ValidateDataTransferDetails']), help='Identifies the ' + 'type of validation request.') + c.argument('shipping_address', action=AddShippingAddress, nargs='+', help='Shipping address of the customer.') + c.argument('device_type', arg_type=get_enum_type(['DataBox', 'DataBoxDisk', 'DataBoxHeavy']), help='Device ' + 'type to be used for the job.') + c.argument('preferred_shipment_type', arg_type=get_enum_type(['CustomerManaged', 'MicrosoftManaged']), + help='Indicates Shipment Logistics type that the customer preferred.', arg_group='Transport ' + 'Preferences') + + with self.argument_context('databox service validate-input') as c: + c.argument('location', arg_type=get_location_type(self.cli_ctx), id_part='name') + c.argument('create_job_validations', action=AddCreateJobValidations, 
nargs='+', help='It does all pre-job ' + 'creation validations.', arg_group='ValidationRequest') + + with self.argument_context('databox service validate-input-by-resource-group') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, + validator=get_default_location_from_resource_group, id_part='name') + c.argument('create_job_validations', action=AddCreateJobValidations, nargs='+', help='It does all pre-job ' + 'creation validations.', arg_group='ValidationRequest') diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/_version.py b/src/databox/azext_databox/generated/_validators.py similarity index 82% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/_version.py rename to src/databox/azext_databox/generated/_validators.py index eae7c95b6fb..b33a44c1ebf 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/_version.py +++ b/src/databox/azext_databox/generated/_validators.py @@ -1,9 +1,9 @@ -# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# # Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. # -------------------------------------------------------------------------- - -VERSION = "0.1.0" diff --git a/src/databox/azext_databox/generated/action.py b/src/databox/azext_databox/generated/action.py new file mode 100644 index 00000000000..3c46e32567b --- /dev/null +++ b/src/databox/azext_databox/generated/action.py @@ -0,0 +1,254 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=protected-access + +import argparse +from collections import defaultdict +from knack.util import CLIError + + +class AddSku(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.sku = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'name': + d['name'] = v[0] + elif kl == 'display-name': + d['display_name'] = v[0] + elif kl == 'family': + d['family'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter sku. 
All possible keys are: name, ' + 'display-name, family'.format(k)) + return d + + +class AddShippingAddress(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.shipping_address = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + d['address_type'] = "None" + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'street-address1': + d['street_address1'] = v[0] + elif kl == 'street-address2': + d['street_address2'] = v[0] + elif kl == 'street-address3': + d['street_address3'] = v[0] + elif kl == 'city': + d['city'] = v[0] + elif kl == 'state-or-province': + d['state_or_province'] = v[0] + elif kl == 'country': + d['country'] = v[0] + elif kl == 'postal-code': + d['postal_code'] = v[0] + elif kl == 'zip-extended-code': + d['zip_extended_code'] = v[0] + elif kl == 'company-name': + d['company_name'] = v[0] + elif kl == 'address-type': + d['address_type'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter shipping_address. All possible keys are: ' + 'street-address1, street-address2, street-address3, city, state-or-province, country, ' + 'postal-code, zip-extended-code, company-name, address-type'.format(k)) + return d + + +class AddUserAssigned(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.user_assigned = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'resource-id': + d['resource_id'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter user_assigned. All possible keys are: ' + 'resource-id'.format(k)) + return d + + +class AddNotificationPreference(argparse._AppendAction): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + super(AddNotificationPreference, self).__call__(parser, namespace, action, option_string) + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + d['send_notification'] = True + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'stage-name': + d['stage_name'] = v[0] + elif kl == 'send-notification': + d['send_notification'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter notification_preference. 
All possible ' + 'keys are: stage-name, send-notification'.format(k)) + return d + + +class AddDataBoxScheduleAvailabilityRequest(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.data_box_schedule_availability_request = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'storage-location': + d['storage_location'] = v[0] + elif kl == 'country': + d['country'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter data_box_schedule_availability_request. ' + 'All possible keys are: storage-location, country'.format(k)) + d['sku_name'] = 'DataBox' + return d + + +class AddDiskScheduleAvailabilityRequest(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.disk_schedule_availability_request = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'expected-data-size-in-tera-bytes': + d['expected_data_size_in_tera_bytes'] = v[0] + elif kl == 'storage-location': + d['storage_location'] = v[0] + elif kl == 'country': + d['country'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter disk_schedule_availability_request. All ' + 'possible keys are: expected-data-size-in-tera-bytes, storage-location, country'.format(k)) + d['sku_name'] = 'DataBoxDisk' + return d + + +class AddHeavyScheduleAvailabilityRequest(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.heavy_schedule_availability_request = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'storage-location': + d['storage_location'] = v[0] + elif kl == 'country': + d['country'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter heavy_schedule_availability_request. 
All ' + 'possible keys are: storage-location, country'.format(k)) + d['sku_name'] = 'DataBoxHeavy' + return d + + +class AddCreateJobValidations(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.create_job_validations = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + d['validation_category'] = "JobCreationValidation" + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'individual-request-details': + d['individual_request_details'] = v + else: + raise CLIError('Unsupported Key {} is provided for parameter create_job_validations. All possible keys ' + 'are: individual-request-details'.format(k)) + d['validation_category'] = 'JobCreationValidation' + return d diff --git a/src/databox/azext_databox/generated/commands.py b/src/databox/azext_databox/generated/commands.py new file mode 100644 index 00000000000..82c988a5774 --- /dev/null +++ b/src/databox/azext_databox/generated/commands.py @@ -0,0 +1,53 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=too-many-statements +# pylint: disable=too-many-locals + +from azure.cli.core.commands import CliCommandType + + +def load_command_table(self, _): + + from azext_databox.generated._client_factory import cf_job + databox_job = CliCommandType( + operations_tmpl='azext_databox.vendored_sdks.databox.operations._jobs_operations#JobsOperations.{}', + client_factory=cf_job) + with self.command_group('databox job', databox_job, client_factory=cf_job) as g: + g.custom_command('list', 'databox_job_list') + g.custom_show_command('show', 'databox_job_show') + g.custom_command('create', 'databox_job_create', supports_no_wait=True) + g.custom_command('update', 'databox_job_update', supports_no_wait=True) + g.custom_command('delete', 'databox_job_delete', supports_no_wait=True, confirmation=True) + g.custom_command('book-shipment-pick-up', 'databox_job_book_shipment_pick_up') + g.custom_command('cancel', 'databox_job_cancel') + g.custom_command('list-credentials', 'databox_job_list_credentials') + g.custom_wait_command('wait', 'databox_job_show') + + from azext_databox.generated._client_factory import cf_databox + databox_ = CliCommandType( + operations_tmpl='azext_databox.vendored_sdks.databox.operations._model_operations#DataBoxManagementClientOperat' + 'ionsMixin.{}', + client_factory=cf_databox) + with self.command_group('databox', databox_, client_factory=cf_databox, is_experimental=True) as g: + g.custom_command('mitigate', 'databox_mitigate') + + from azext_databox.generated._client_factory import cf_service + databox_service = CliCommandType( + operations_tmpl='azext_databox.vendored_sdks.databox.operations._service_operations#ServiceOperations.{}', + client_factory=cf_service) + with self.command_group('databox service', 
databox_service, client_factory=cf_service) as g: + g.custom_command('list-available-sku-by-resource-group', + 'databox_service_list_available_sku_by_resource_group') + g.custom_command('region-configuration', 'databox_service_region_configuration') + g.custom_command('region-configuration-by-resource-group', 'databox_service_region_configuration_by_resource_gr' + 'oup') + g.custom_command('validate-address', 'databox_service_validate_address') + g.custom_command('validate-input', 'databox_service_validate_input') + g.custom_command('validate-input-by-resource-group', 'databox_service_validate_input_by_resource_group') diff --git a/src/databox/azext_databox/generated/custom.py b/src/databox/azext_databox/generated/custom.py new file mode 100644 index 00000000000..21cd767957b --- /dev/null +++ b/src/databox/azext_databox/generated/custom.py @@ -0,0 +1,289 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=line-too-long +# pylint: disable=too-many-lines + +from knack.util import CLIError +from azure.cli.core.util import sdk_no_wait + + +def databox_job_list(client, + resource_group_name=None, + skip_token=None): + if resource_group_name: + return client.list_by_resource_group(resource_group_name=resource_group_name, + skip_token=skip_token) + return client.list(skip_token=skip_token) + + +def databox_job_show(client, + resource_group_name, + job_name, + expand=None): + return client.get(resource_group_name=resource_group_name, + job_name=job_name, + expand=expand) + + +def databox_job_create(client, + resource_group_name, + job_name, + location, + sku, + transfer_type, + tags=None, + type_=None, + user_assigned_identities=None, + details=None, + delivery_type=None, + scheduled_date_time=None, + no_wait=False): + if type_ is None: + type_ = "None" + if delivery_type is None: + delivery_type = "NonScheduled" + job_resource = {} + job_resource['location'] = location + job_resource['tags'] = tags + job_resource['sku'] = sku + job_resource['identity'] = {} + job_resource['identity']['type'] = "None" if type_ is None else type_ + job_resource['identity']['user_assigned_identities'] = user_assigned_identities + job_resource['details'] = details + job_resource['delivery_type'] = "NonScheduled" if delivery_type is None else delivery_type + job_resource['delivery_info'] = {} + job_resource['delivery_info']['scheduled_date_time'] = scheduled_date_time + return sdk_no_wait(no_wait, + client.begin_create, + resource_group_name=resource_group_name, + job_name=job_name, + job_resource=job_resource) + + +def databox_job_update(client, + resource_group_name, + job_name, + if_match=None, + tags=None, + shipping_address=None, + kek_type=None, + kek_url=None, + kek_vault_resource_id=None, + type_=None, + user_assigned=None, + contact_name=None, + phone=None, + phone_extension=None, + mobile=None, + email_list=None, + notification_preference=None, + resource_identity_type=None, + user_assigned_identities=None, + no_wait=False): + if kek_type is None: + kek_type = "MicrosoftManaged" + if resource_identity_type is None: + resource_identity_type = "None" + job_resource_update_parameter 
= {} + job_resource_update_parameter['tags'] = tags + job_resource_update_parameter['details'] = {} + job_resource_update_parameter['details']['shipping_address'] = shipping_address + job_resource_update_parameter['details']['key_encryption_key'] = {} + job_resource_update_parameter['details']['key_encryption_key']['kek_type'] = "MicrosoftManaged" if kek_type is None else kek_type + job_resource_update_parameter['details']['key_encryption_key']['kek_url'] = kek_url + job_resource_update_parameter['details']['key_encryption_key']['kek_vault_resource_id'] = kek_vault_resource_id + job_resource_update_parameter['contact_details'] = {} + job_resource_update_parameter['contact_details']['contact_name'] = contact_name + job_resource_update_parameter['contact_details']['phone'] = phone + job_resource_update_parameter['contact_details']['phone_extension'] = phone_extension + job_resource_update_parameter['contact_details']['mobile'] = mobile + job_resource_update_parameter['contact_details']['email_list'] = email_list + job_resource_update_parameter['contact_details']['notification_preference'] = notification_preference + job_resource_update_parameter['identity'] = {} + job_resource_update_parameter['identity']['type'] = "None" if resource_identity_type is None else resource_identity_type + job_resource_update_parameter['identity']['user_assigned_identities'] = user_assigned_identities + return sdk_no_wait(no_wait, + client.begin_update, + resource_group_name=resource_group_name, + job_name=job_name, + if_match=if_match, + job_resource_update_parameter=job_resource_update_parameter) + + +def databox_job_delete(client, + resource_group_name, + job_name, + no_wait=False): + return sdk_no_wait(no_wait, + client.begin_delete, + resource_group_name=resource_group_name, + job_name=job_name) + + +def databox_job_book_shipment_pick_up(client, + resource_group_name, + job_name, + start_time, + end_time, + shipment_location): + shipment_pick_up_request = {} + shipment_pick_up_request['start_time'] = start_time + shipment_pick_up_request['end_time'] = end_time + shipment_pick_up_request['shipment_location'] = shipment_location + return client.book_shipment_pick_up(resource_group_name=resource_group_name, + job_name=job_name, + shipment_pick_up_request=shipment_pick_up_request) + + +def databox_job_cancel(client, + resource_group_name, + job_name, + reason): + cancellation_reason = {} + cancellation_reason['reason'] = reason + return client.cancel(resource_group_name=resource_group_name, + job_name=job_name, + cancellation_reason=cancellation_reason) + + +def databox_job_list_credentials(client, + resource_group_name, + job_name): + return client.list_credentials(resource_group_name=resource_group_name, + job_name=job_name) + + +def databox_mitigate(client, + job_name, + resource_group_name, + customer_resolution_code): + mitigate_job_request = {} + mitigate_job_request['customer_resolution_code'] = customer_resolution_code + return client.mitigate(job_name=job_name, + resource_group_name=resource_group_name, + mitigate_job_request=mitigate_job_request) + + +def databox_service_list_available_sku_by_resource_group(client, + resource_group_name, + location, + transfer_type, + country, + available_sku_request_location, + sku_names=None): + available_sku_request = {} + available_sku_request['transfer_type'] = transfer_type + available_sku_request['country'] = country + available_sku_request['location'] = available_sku_request_location + available_sku_request['sku_names'] = sku_names + return 
client.list_available_skus_by_resource_group(resource_group_name=resource_group_name, + location=location, + available_sku_request=available_sku_request) + + +def databox_service_region_configuration(client, + location, + data_box_schedule_availability_request=None, + disk_schedule_availability_request=None, + heavy_schedule_availability_request=None, + sku_name=None): + all_schedule_availability_request = [] + if data_box_schedule_availability_request is not None: + all_schedule_availability_request.append(data_box_schedule_availability_request) + if disk_schedule_availability_request is not None: + all_schedule_availability_request.append(disk_schedule_availability_request) + if heavy_schedule_availability_request is not None: + all_schedule_availability_request.append(heavy_schedule_availability_request) + if len(all_schedule_availability_request) > 1: + raise CLIError('at most one of data_box_schedule_availability_request, disk_schedule_availability_request, ' + 'heavy_schedule_availability_request is needed for schedule_availability_request!') + schedule_availability_request = all_schedule_availability_request[0] if len(all_schedule_availability_request) == \ + 1 else None + region_configuration_request = {} + region_configuration_request['schedule_availability_request'] = schedule_availability_request + region_configuration_request['transport_availability_request'] = {} + region_configuration_request['transport_availability_request']['sku_name'] = sku_name + return client.region_configuration(location=location, + region_configuration_request=region_configuration_request) + + +def databox_service_region_configuration_by_resource_group(client, + resource_group_name, + location, + data_box_schedule_availability_request=None, + disk_schedule_availability_request=None, + heavy_schedule_availability_request=None, + sku_name=None): + all_schedule_availability_request = [] + if data_box_schedule_availability_request is not None: + all_schedule_availability_request.append(data_box_schedule_availability_request) + if disk_schedule_availability_request is not None: + all_schedule_availability_request.append(disk_schedule_availability_request) + if heavy_schedule_availability_request is not None: + all_schedule_availability_request.append(heavy_schedule_availability_request) + if len(all_schedule_availability_request) > 1: + raise CLIError('at most one of data_box_schedule_availability_request, disk_schedule_availability_request, ' + 'heavy_schedule_availability_request is needed for schedule_availability_request!') + schedule_availability_request = all_schedule_availability_request[0] if len(all_schedule_availability_request) == \ + 1 else None + region_configuration_request = {} + region_configuration_request['schedule_availability_request'] = schedule_availability_request + region_configuration_request['transport_availability_request'] = {} + region_configuration_request['transport_availability_request']['sku_name'] = sku_name + return client.region_configuration_by_resource_group(resource_group_name=resource_group_name, + location=location, + region_configuration_request=region_configuration_request) + + +def databox_service_validate_address(client, + location, + validation_type, + shipping_address, + device_type, + preferred_shipment_type=None): + validate_address = {} + validate_address['shipping_address'] = shipping_address + validate_address['device_type'] = device_type + validate_address['transport_preferences'] = {} + validate_address['transport_preferences']['preferred_shipment_type'] 
= preferred_shipment_type + return client.validate_address(location=location, + validate_address=validate_address) + + +def databox_service_validate_input(client, + location, + create_job_validations=None): + all_validation_request = [] + if create_job_validations is not None: + all_validation_request.append(create_job_validations) + if len(all_validation_request) > 1: + raise CLIError('at most one of create_job_validations is needed for validation_request!') + if len(all_validation_request) != 1: + raise CLIError('validation_request is required. but none of create_job_validations is provided!') + validation_request = all_validation_request[0] if len(all_validation_request) == 1 else None + return client.validate_inputs(location=location, + validation_request=validation_request) + + +def databox_service_validate_input_by_resource_group(client, + resource_group_name, + location, + create_job_validations=None): + all_validation_request = [] + if create_job_validations is not None: + all_validation_request.append(create_job_validations) + if len(all_validation_request) > 1: + raise CLIError('at most one of create_job_validations is needed for validation_request!') + if len(all_validation_request) != 1: + raise CLIError('validation_request is required. but none of create_job_validations is provided!') + validation_request = all_validation_request[0] if len(all_validation_request) == 1 else None + return client.validate_inputs_by_resource_group(resource_group_name=resource_group_name, + location=location, + validation_request=validation_request) diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/__init__.py b/src/databox/azext_databox/manual/__init__.py similarity index 73% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/__init__.py rename to src/databox/azext_databox/manual/__init__.py index bb6b75a72db..c9cfdc73e77 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/__init__.py +++ b/src/databox/azext_databox/manual/__init__.py @@ -1,10 +1,12 @@ # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# # Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. # -------------------------------------------------------------------------- -from ._data_box_management_client import DataBoxManagementClient -__all__ = ['DataBoxManagementClient'] +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/src/databox/azext_databox/tests/__init__.py b/src/databox/azext_databox/tests/__init__.py new file mode 100644 index 00000000000..70488e93851 --- /dev/null +++ b/src/databox/azext_databox/tests/__init__.py @@ -0,0 +1,116 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
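
As a hedged illustration of the generated `databox_service_validate_input` wrapper shown above: it requires exactly one `create_job_validations` dict (normally produced by the `AddCreateJobValidations` action) and forwards it unchanged as `validation_request`. The sketch below is not part of the extension sources; `FakeServiceClient` is a hypothetical stand-in for the real service operations client, and it assumes the `azext_databox` package is importable.

    from azext_databox.generated.custom import databox_service_validate_input

    class FakeServiceClient:
        # Stand-in for the ServiceOperations client; echoes its arguments so the
        # pass-through behaviour of the wrapper is visible.
        def validate_inputs(self, location, validation_request):
            return {'location': location, 'validation_request': validation_request}

    # Shape mirrors what AddCreateJobValidations builds from the CLI shorthand (illustrative values).
    create_job_validations = {
        'validation_category': 'JobCreationValidation',
        'individual_request_details': [
            {'validationType': 'ValidateSubscriptionIsAllowedToCreateJob'},
        ],
    }

    result = databox_service_validate_input(FakeServiceClient(),
                                            location='westus',
                                            create_job_validations=create_job_validations)
    assert result['validation_request'] is create_job_validations
    # Calling the wrapper without create_job_validations raises CLIError instead.
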
+# -------------------------------------------------------------------------- +import inspect +import logging +import os +import sys +import traceback +import datetime as dt + +from azure.core.exceptions import AzureError +from azure.cli.testsdk.exceptions import CliTestError, CliExecutionError, JMESPathCheckAssertionError + + +logger = logging.getLogger('azure.cli.testsdk') +logger.addHandler(logging.StreamHandler()) +__path__ = __import__('pkgutil').extend_path(__path__, __name__) +exceptions = [] +test_map = dict() +SUCCESSED = "successed" +FAILED = "failed" + + +def try_manual(func): + def import_manual_function(origin_func): + from importlib import import_module + decorated_path = inspect.getfile(origin_func).lower() + module_path = __path__[0].lower() + if not decorated_path.startswith(module_path): + raise Exception("Decorator can only be used in submodules!") + manual_path = os.path.join( + decorated_path[module_path.rfind(os.path.sep) + 1:]) + manual_file_path, manual_file_name = os.path.split(manual_path) + module_name, _ = os.path.splitext(manual_file_name) + manual_module = "..manual." + \ + ".".join(manual_file_path.split(os.path.sep) + [module_name, ]) + return getattr(import_module(manual_module, package=__name__), origin_func.__name__) + + def get_func_to_call(): + func_to_call = func + try: + func_to_call = import_manual_function(func) + logger.info("Found manual override for %s(...)", func.__name__) + except (ImportError, AttributeError): + pass + return func_to_call + + def wrapper(*args, **kwargs): + func_to_call = get_func_to_call() + logger.info("running %s()...", func.__name__) + try: + test_map[func.__name__] = dict() + test_map[func.__name__]["result"] = SUCCESSED + test_map[func.__name__]["error_message"] = "" + test_map[func.__name__]["error_stack"] = "" + test_map[func.__name__]["error_normalized"] = "" + test_map[func.__name__]["start_dt"] = dt.datetime.utcnow() + ret = func_to_call(*args, **kwargs) + except (AssertionError, AzureError, CliTestError, CliExecutionError, SystemExit, + JMESPathCheckAssertionError) as e: + use_exception_cache = os.getenv("TEST_EXCEPTION_CACHE") + if use_exception_cache is None or use_exception_cache.lower() != "true": + raise + test_map[func.__name__]["end_dt"] = dt.datetime.utcnow() + test_map[func.__name__]["result"] = FAILED + test_map[func.__name__]["error_message"] = str(e).replace("\r\n", " ").replace("\n", " ")[:500] + test_map[func.__name__]["error_stack"] = traceback.format_exc().replace( + "\r\n", " ").replace("\n", " ")[:500] + logger.info("--------------------------------------") + logger.info("step exception: %s", e) + logger.error("--------------------------------------") + logger.error("step exception in %s: %s", func.__name__, e) + logger.info(traceback.format_exc()) + exceptions.append((func.__name__, sys.exc_info())) + else: + test_map[func.__name__]["end_dt"] = dt.datetime.utcnow() + return ret + + if inspect.isclass(func): + return get_func_to_call() + return wrapper + + +def calc_coverage(filename): + filename = filename.split(".")[0] + coverage_name = filename + "_coverage.md" + with open(coverage_name, "w") as f: + f.write("|Scenario|Result|ErrorMessage|ErrorStack|ErrorNormalized|StartDt|EndDt|\n") + total = len(test_map) + covered = 0 + for k, v in test_map.items(): + if not k.startswith("step_"): + total -= 1 + continue + if v["result"] == SUCCESSED: + covered += 1 + f.write("|{step_name}|{result}|{error_message}|{error_stack}|{error_normalized}|{start_dt}|" + "{end_dt}|\n".format(step_name=k, **v)) + 
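
For reference, the `*_coverage.md` file that `calc_coverage` writes is a plain markdown-style table followed by a summary line; only functions whose names start with `step_` are counted, and the Result column holds the literal constants `successed`/`failed` defined above. The rows below are purely illustrative:

    |Scenario|Result|ErrorMessage|ErrorStack|ErrorNormalized|StartDt|EndDt|
    |step_job_create|successed||||2021-01-01 00:00:00.123456|2021-01-01 00:01:30.654321|
    Coverage: 1/1
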
f.write("Coverage: {}/{}\n".format(covered, total)) + print("Create coverage\n", file=sys.stderr) + + +def raise_if(): + if exceptions: + if len(exceptions) <= 1: + raise exceptions[0][1][1] + message = "{}\nFollowed with exceptions in other steps:\n".format(str(exceptions[0][1][1])) + message += "\n".join(["{}: {}".format(h[0], h[1][1]) for h in exceptions[1:]]) + raise exceptions[0][1][0](message).with_traceback(exceptions[0][1][2]) diff --git a/src/databox/azext_databox/tests/latest/__init__.py b/src/databox/azext_databox/tests/latest/__init__.py new file mode 100644 index 00000000000..c9cfdc73e77 --- /dev/null +++ b/src/databox/azext_databox/tests/latest/__init__.py @@ -0,0 +1,12 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/src/databox/azext_databox/tests/latest/example_steps.py b/src/databox/azext_databox/tests/latest/example_steps.py new file mode 100644 index 00000000000..6d89057b081 --- /dev/null +++ b/src/databox/azext_databox/tests/latest/example_steps.py @@ -0,0 +1,436 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + + +from .. 
import try_manual + + +# EXAMPLE: /Jobs/put/JobsCreate +@try_manual +def step_job_create(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=None): + if checks is None: + checks = [] + test.cmd('az databox job create ' + '--name "{myJob}" ' + '--location "westus" ' + '--transfer-type "ImportToAzure" ' + '--details "{{\\"contactDetails\\":{{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@m' + 'icrosoft.com\\"],\\"phone\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"}},\\"dataImportDetails\\":[' + '{{\\"accountDetails\\":{{\\"dataAccountType\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscripti' + 'ons/{subscription_id}/resourcegroups/{rg_5}/providers/Microsoft.Storage/storageAccounts/{sa}\\"}}}}],\\"j' + 'obDetailsType\\":\\"DataBox\\",\\"shippingAddress\\":{{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"S' + 'an Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"' + 'stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit ' + '1\\"}}}}" ' + '--sku name="DataBox" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Jobs/put/JobsCreateDevicePassword +@try_manual +def step_job_create2(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, + rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox job create ' + '--name "{myJob5}" ' + '--location "westus" ' + '--transfer-type "ImportToAzure" ' + '--details "{{\\"contactDetails\\":{{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@m' + 'icrosoft.com\\"],\\"phone\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"}},\\"dataImportDetails\\":[' + '{{\\"accountDetails\\":{{\\"dataAccountType\\":\\"StorageAccount\\",\\"sharePassword\\":\\"\\",\\"storageAccountId\\":\\"/subscriptions/{subscription_id}/resourceGroups/{rg_6}/providers/Microsoft' + '.Storage/storageAccounts/{sa_2}\\"}}}}],\\"devicePassword\\":\\"\\",\\"jobDetailsType\\":' + '\\"DataBox\\",\\"shippingAddress\\":{{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San ' + 'Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"sta' + 'teOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit ' + '1\\"}}}}" ' + '--sku name="DataBox" ' + '--resource-group "{rg_7}"', + checks=checks) + + +# EXAMPLE: /Jobs/put/JobsCreateDoubleEncryption +@try_manual +def step_job_create3(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, + rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox job create ' + '--name "{myJob6}" ' + '--location "westus" ' + '--transfer-type "ImportToAzure" ' + '--details "{{\\"contactDetails\\":{{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@m' + 'icrosoft.com\\"],\\"phone\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"}},\\"dataImportDetails\\":[' + '{{\\"accountDetails\\":{{\\"dataAccountType\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscripti' + 'ons/{subscription_id}/resourcegroups/{rg_5}/providers/Microsoft.Storage/storageAccounts/{sa}\\"}}}}],\\"j' + 'obDetailsType\\":\\"DataBox\\",\\"preferences\\":{{\\"encryptionPreferences\\":{{\\"doubleEncryption\\":' + '\\"Enabled\\"}}}},\\"shippingAddress\\":{{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San ' + 
'Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"sta' + 'teOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit ' + '1\\"}}}}" ' + '--sku name="DataBox" ' + '--resource-group "{rg_8}"', + checks=checks) + + +# EXAMPLE: /Jobs/put/JobsCreateExport +@try_manual +def step_job_create4(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, + rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox job create ' + '--name "{myJob4}" ' + '--location "westus" ' + '--transfer-type "ExportFromAzure" ' + '--details "{{\\"contactDetails\\":{{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@m' + 'icrosoft.com\\"],\\"phone\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"}},\\"dataExportDetails\\":[' + '{{\\"accountDetails\\":{{\\"dataAccountType\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscripti' + 'ons/{subscription_id}/resourceGroups/{rg_9}/providers/Microsoft.Storage/storageAccounts/{sa_3}\\"}},\\"tr' + 'ansferConfiguration\\":{{\\"transferAllDetails\\":{{\\"include\\":{{\\"dataAccountType\\":\\"StorageAccou' + 'nt\\",\\"transferAllBlobs\\":true,\\"transferAllFiles\\":true}}}},\\"transferConfigurationType\\":\\"Tran' + 'sferAll\\"}}}}],\\"jobDetailsType\\":\\"DataBox\\",\\"shippingAddress\\":{{\\"addressType\\":\\"Commercia' + 'l\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"postalCod' + 'e\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND ' + 'ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}}}" ' + '--sku name="DataBox" ' + '--resource-group "{rg_4}"', + checks=checks) + + +# EXAMPLE: /Jobs/put/JobsCreateWithUserAssignedIdentity +@try_manual +def step_job_create5(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, + rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox job create ' + '--name "{myJob7}" ' + '--type "UserAssigned" ' + '--user-assigned-identities "{{\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/akven' + 'kat/providers/Microsoft.ManagedIdentity/userAssignedIdentities/sdkIdentity\\":{{}}}}" ' + '--location "westus" ' + '--transfer-type "ImportToAzure" ' + '--details "{{\\"contactDetails\\":{{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@m' + 'icrosoft.com\\"],\\"phone\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"}},\\"dataImportDetails\\":[' + '{{\\"accountDetails\\":{{\\"dataAccountType\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscripti' + 'ons/{subscription_id}/resourceGroups/{rg_6}/providers/Microsoft.Storage/storageAccounts/{sa_2}\\"}}}}],\\' + '"jobDetailsType\\":\\"DataBox\\",\\"shippingAddress\\":{{\\"addressType\\":\\"Commercial\\",\\"city\\":\\' + '"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",' + '\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit ' + '1\\"}}}}" ' + '--sku name="DataBox" ' + '--resource-group "{rg_10}"', + checks=checks) + + +# EXAMPLE: /Jobs/get/JobsGet +@try_manual +def step_job_show(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=None): + if checks is None: + checks = [] + test.cmd('az databox job show ' + '--expand "details" ' + '--name "{myJob}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: 
/Jobs/get/JobsGetCmk +@try_manual +def step_job_show2(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=None): + if checks is None: + checks = [] + test.cmd('az databox job show ' + '--expand "details" ' + '--name "{myJob2}" ' + '--resource-group "{rg_2}"', + checks=checks) + + +# EXAMPLE: /Jobs/get/JobsGetCopyStuck +@try_manual +def step_job_show3(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=None): + if checks is None: + checks = [] + test.cmd('az databox job show ' + '--expand "details" ' + '--name "{myJob3}" ' + '--resource-group "{rg_3}"', + checks=checks) + + +# EXAMPLE: /Jobs/get/JobsGetExport +@try_manual +def step_job_show4(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=None): + if checks is None: + checks = [] + test.cmd('az databox job show ' + '--expand "details" ' + '--name "{myJob4}" ' + '--resource-group "{rg_4}"', + checks=checks) + + +# EXAMPLE: /Jobs/get/JobsList +@try_manual +def step_job_list(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=None): + if checks is None: + checks = [] + test.cmd('az databox job list ' + '-g ""', + checks=checks) + + +# EXAMPLE: /Jobs/get/JobsListByResourceGroup +@try_manual +def step_job_list2(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=None): + if checks is None: + checks = [] + test.cmd('az databox job list ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Jobs/patch/JobsPatch +@try_manual +def step_job_update(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=None): + if checks is None: + checks = [] + test.cmd('az databox job update ' + '--name "{myJob}" ' + '--contact-name "Update Job" ' + '--email-list "testing@microsoft.com" ' + '--phone "1234567890" ' + '--phone-extension "1234" ' + '--shipping-address address-type="Commercial" city="San Francisco" company-name="Microsoft" country="US" ' + 'postal-code="94107" state-or-province="CA" street-address1="16 TOWNSEND ST" street-address2="Unit 1" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Jobs/patch/JobsPatchCmk +@try_manual +def step_job_update2(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, + rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox job update ' + '--name "{myJob2}" ' + '--kek-type "CustomerManaged" ' + '--kek-url "https://sdkkeyvault.vault.azure.net/keys/SSDKEY/" ' + '--kek-vault-resource-id "/subscriptions/{subscription_id}/resourceGroups/{rg_9}/providers/Microsoft.KeyVa' + 'ult/vaults/SDKKeyVault" ' + '--resource-group "{rg_2}"', + checks=checks) + + +# EXAMPLE: /Jobs/patch/JobsPatchSystemAssignedToUserAssigned +@try_manual +def step_job_update3(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, + rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox job update ' + '--name "{myJob8}" ' + '--resource-identity-type "SystemAssigned,UserAssigned" ' + '--user-assigned-identities "{{\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/akven' + 'kat/providers/Microsoft.ManagedIdentity/userAssignedIdentities/sdkIdentity\\":{{}}}}" ' + '--type "UserAssigned" ' + '--user-assigned 
resource-id="/subscriptions/{subscription_id}/resourceGroups/{rg_9}/providers/Microsoft.M' + 'anagedIdentity/userAssignedIdentities/sdkIdentity" ' + '--kek-type "CustomerManaged" ' + '--kek-url "https://sdkkeyvault.vault.azure.net/keys/SSDKEY/" ' + '--kek-vault-resource-id "/subscriptions/{subscription_id}/resourceGroups/{rg_9}/providers/Microsoft.KeyVa' + 'ult/vaults/SDKKeyVault" ' + '--resource-group "{rg_11}"', + checks=checks) + + +# EXAMPLE: /Jobs/post/BookShipmentPickupPost +@try_manual +def step_job_book_shipment_pick_up(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, + rg_13, rg_14, rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox job book-shipment-pick-up ' + '--name "{myJob9}" ' + '--resource-group "{rg_12}" ' + '--end-time "2019-09-22T18:30:00Z" ' + '--shipment-location "Front desk" ' + '--start-time "2019-09-20T18:30:00Z"', + checks=checks) + + +# EXAMPLE: /Jobs/post/JobsCancelPost +@try_manual +def step_job_cancel(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=None): + if checks is None: + checks = [] + test.cmd('az databox job cancel ' + '--reason "CancelTest" ' + '--name "{myJob}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Jobs/post/JobsListCredentials +@try_manual +def step_job_list_credentials(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, + rg_14, rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox job list-credentials ' + '--name "{myJob9}" ' + '--resource-group "{rg_12}"', + checks=checks) + + +# EXAMPLE: /databox/post/Mitigate +@try_manual +def step_mitigate(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=None): + if checks is None: + checks = [] + test.cmd('az databox mitigate ' + '--job-name "{myJob10}" ' + '--customer-resolution-code "MoveToCleanUpDevice" ' + '--resource-group "{rg_13}"', + checks=checks) + + +# EXAMPLE: /Jobs/delete/JobsDelete +@try_manual +def step_job_delete(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=None): + if checks is None: + checks = [] + test.cmd('az databox job delete -y ' + '--name "{myJob}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Service/post/AvailableSkusPost +@try_manual +def step_service_list_available_sku_by_resource_group(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, + rg_11, rg_12, rg_13, rg_14, rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox service list-available-sku-by-resource-group ' + '--country "US" ' + '--available-sku-request-location "westus" ' + '--transfer-type "ImportToAzure" ' + '--location "westus" ' + '--resource-group "{rg_12}"', + checks=checks) + + +# EXAMPLE: /Service/post/RegionConfiguration +@try_manual +def step_service_region_configuration(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, + rg_13, rg_14, rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox service region-configuration ' + '--location "westus" ' + '--schedule-availability-request "{{\\"skuName\\":\\"DataBox\\",\\"storageLocation\\":\\"westus\\"}}"', + checks=checks) + + +# EXAMPLE: /Service/post/RegionConfigurationByResourceGroup +@try_manual +def step_service_region_configuration2(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, + rg_13, rg_14, rg_15, 
checks=None): + if checks is None: + checks = [] + test.cmd('az databox service region-configuration-by-resource-group ' + '--location "westus" ' + '--schedule-availability-request "{{\\"skuName\\":\\"DataBox\\",\\"storageLocation\\":\\"westus\\"}}" ' + '--resource-group "{rg_14}"', + checks=checks) + + +# EXAMPLE: /Service/post/ValidateAddressPost +@try_manual +def step_service_validate_address(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, + rg_14, rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox service validate-address ' + '--location "westus" ' + '--device-type "DataBox" ' + '--shipping-address address-type="Commercial" city="San Francisco" company-name="Microsoft" country="US" ' + 'postal-code="94107" state-or-province="CA" street-address1="16 TOWNSEND ST" street-address2="Unit 1" ' + '--validation-type "ValidateAddress"', + checks=checks) + + +# EXAMPLE: /Service/post/ValidateInputs +@try_manual +def step_service_validate_input(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, + rg_14, rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox service validate-input ' + '--location "westus" ' + '--validation-request "{{\\"individualRequestDetails\\":[{{\\"dataImportDetails\\":[{{\\"accountDetails\\"' + ':{{\\"dataAccountType\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/{subscription_id}/' + 'resourcegroups/{rg_5}/providers/Microsoft.Storage/storageAccounts/{sa}\\"}}}}],\\"deviceType\\":\\"DataBo' + 'x\\",\\"transferType\\":\\"ImportToAzure\\",\\"validationType\\":\\"ValidateDataTransferDetails\\"}},{{\\' + '"deviceType\\":\\"DataBox\\",\\"shippingAddress\\":{{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San' + ' Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"st' + 'ateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit ' + '1\\"}},\\"transportPreferences\\":{{\\"preferredShipmentType\\":\\"MicrosoftManaged\\"}},\\"validationTyp' + 'e\\":\\"ValidateAddress\\"}},{{\\"validationType\\":\\"ValidateSubscriptionIsAllowedToCreateJob\\"}},{{\\' + '"country\\":\\"US\\",\\"deviceType\\":\\"DataBox\\",\\"location\\":\\"westus\\",\\"transferType\\":\\"Imp' + 'ortToAzure\\",\\"validationType\\":\\"ValidateSkuAvailability\\"}},{{\\"deviceType\\":\\"DataBox\\",\\"va' + 'lidationType\\":\\"ValidateCreateOrderLimit\\"}},{{\\"deviceType\\":\\"DataBox\\",\\"preference\\":{{\\"t' + 'ransportPreferences\\":{{\\"preferredShipmentType\\":\\"MicrosoftManaged\\"}}}},\\"validationType\\":\\"V' + 'alidatePreferences\\"}}],\\"validationCategory\\":\\"JobCreationValidation\\"}}"', + checks=checks) + + +# EXAMPLE: /Service/post/ValidateInputsByResourceGroup +@try_manual +def step_service_validate_input_by_resource_group(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, + rg_11, rg_12, rg_13, rg_14, rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox service validate-input-by-resource-group ' + '--location "westus" ' + '--resource-group "{rg_15}" ' + '--validation-request "{{\\"individualRequestDetails\\":[{{\\"dataImportDetails\\":[{{\\"accountDetails\\"' + ':{{\\"dataAccountType\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/{subscription_id}/' + 'resourcegroups/{rg_5}/providers/Microsoft.Storage/storageAccounts/{sa}\\"}}}}],\\"deviceType\\":\\"DataBo' + 
'x\\",\\"transferType\\":\\"ImportToAzure\\",\\"validationType\\":\\"ValidateDataTransferDetails\\"}},{{\\' + '"deviceType\\":\\"DataBox\\",\\"shippingAddress\\":{{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San' + ' Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"st' + 'ateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit ' + '1\\"}},\\"transportPreferences\\":{{\\"preferredShipmentType\\":\\"MicrosoftManaged\\"}},\\"validationTyp' + 'e\\":\\"ValidateAddress\\"}},{{\\"validationType\\":\\"ValidateSubscriptionIsAllowedToCreateJob\\"}},{{\\' + '"country\\":\\"US\\",\\"deviceType\\":\\"DataBox\\",\\"location\\":\\"westus\\",\\"transferType\\":\\"Imp' + 'ortToAzure\\",\\"validationType\\":\\"ValidateSkuAvailability\\"}},{{\\"deviceType\\":\\"DataBox\\",\\"va' + 'lidationType\\":\\"ValidateCreateOrderLimit\\"}},{{\\"deviceType\\":\\"DataBox\\",\\"preference\\":{{\\"t' + 'ransportPreferences\\":{{\\"preferredShipmentType\\":\\"MicrosoftManaged\\"}}}},\\"validationType\\":\\"V' + 'alidatePreferences\\"}}],\\"validationCategory\\":\\"JobCreationValidation\\"}}"', + checks=checks) + diff --git a/src/databox/azext_databox/tests/latest/test_databox_scenario.py b/src/databox/azext_databox/tests/latest/test_databox_scenario.py index a19ecfbd149..29ec1d8989c 100644 --- a/src/databox/azext_databox/tests/latest/test_databox_scenario.py +++ b/src/databox/azext_databox/tests/latest/test_databox_scenario.py @@ -1,143 +1,159 @@ -# -------------------------------------------------------------------------------------------- +# -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=line-too-long import os +from azure.cli.testsdk import ScenarioTest +from azure.cli.testsdk import ResourceGroupPreparer +from azure.cli.testsdk import StorageAccountPreparer +from .example_steps import step_job_create +from .example_steps import step_job_show +from .example_steps import step_job_list2 +from .example_steps import step_job_list +from .example_steps import step_job_book_shipment_pick_up +from .example_steps import step_job_list_credentials +from .example_steps import step_job_cancel +from .example_steps import step_job_update +from .example_steps import step_service_validate_address +from .example_steps import step_service_list_available_sku_by_resource_group +from .example_steps import step_job_delete +from .. 
import ( + try_manual, + raise_if, + calc_coverage +) -from azure.cli.testsdk import (ScenarioTest, ResourceGroupPreparer, StorageAccountPreparer, JMESPathCheck) TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), '..')) -class DataBoxScenarioTest(ScenarioTest): +# Env setup_scenario +@try_manual +def setup_scenario(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, + rg_15): + pass + + +# Env cleanup_scenario +@try_manual +def cleanup_scenario(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, + rg_15): + pass + + +# Testcase: Scenario +@try_manual +def call_scenario(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15): + setup_scenario(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15) + step_job_create(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=[ + test.check("name", "{myJob}", case_sensitive=False), + test.check("location", "westus", case_sensitive=False), + test.check("transferType", "ImportToAzure", case_sensitive=False), + test.check("sku.name", "DataBox", case_sensitive=False), + ]) + # STEP NOT FOUND: JobsGet6 + # STEP NOT FOUND: JobsGet5 + # STEP NOT FOUND: JobsGet4 + # STEP NOT FOUND: JobsGet3 + # STEP NOT FOUND: JobsGet2 + # STEP NOT FOUND: JobsGet1 + step_job_show(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=[ + test.check("name", "{myJob}", case_sensitive=False), + test.check("location", "westus", case_sensitive=False), + test.check("transferType", "ImportToAzure", case_sensitive=False), + test.check("sku.name", "DataBox", case_sensitive=False), + ]) + step_job_list2(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=[ + test.check('length(@)', 1), + ]) + step_job_list(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=[ + test.check('length(@)', 8), + ]) + # STEP NOT FOUND: OperationsGet + # STEP NOT FOUND: ServiceValidateInputsByResourceGroup + # STEP NOT FOUND: AvailableSkusByResourceGroup + step_job_book_shipment_pick_up(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, + rg_13, rg_14, rg_15, checks=[]) + step_job_list_credentials(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, + rg_14, rg_15, checks=[]) + step_job_cancel(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=[]) + step_job_update(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=[ + test.check("name", "{myJob}", case_sensitive=False), + ]) + # STEP NOT FOUND: ServiceRegionConfiguration + step_service_validate_address(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, + rg_14, rg_15, checks=[]) + # STEP NOT FOUND: ServiceValidateInputs + step_service_list_available_sku_by_resource_group(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, + rg_11, rg_12, rg_13, rg_14, rg_15, checks=[]) + step_job_delete(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=[]) + # STEP NOT FOUND: JobMitigate + cleanup_scenario(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, 
rg_14, + rg_15) + + +# Test class for Scenario +@try_manual +class DataboxScenarioTest(ScenarioTest): + + def __init__(self, *args, **kwargs): + super(DataboxScenarioTest, self).__init__(*args, **kwargs) + self.kwargs.update({ + 'subscription_id': self.get_subscription_id() + }) - @ResourceGroupPreparer(name_prefix='cli_test_databox') - @StorageAccountPreparer(parameter_name='storage_account_1') - @StorageAccountPreparer(parameter_name='storage_account_2') - def test_databox(self, storage_account_1, storage_account_2): - job_name = self.create_random_name('job', 24) - job_name_2 = self.create_random_name('job', 24) self.kwargs.update({ - 'job_name': job_name, - 'job_name_2': job_name_2, - 'storage_account_1': storage_account_1, - 'storage_account_2': storage_account_2 + 'myJob': 'SdkJob952', + 'myJob2': 'SdkJob1735', + 'myJob3': 'TJx-637505258985313014', + 'myJob4': 'SdkJob6429', + 'myJob5': 'SdkJob9640', + 'myJob6': 'SdkJob6599', + 'myJob7': 'SdkJob5337', + 'myJob8': 'SdkJob2965', + 'myJob9': 'TJ-636646322037905056', + 'myJob10': 'SdkJob8367', }) - # Create a databox job with sku 'DataBox'. - self.cmd('databox job create ' - '--resource-group {rg} ' - '--name {job_name} ' - '--location westus ' - '--sku DataBox ' - '--contact-name "Public SDK Test" ' - '--phone 14258828080 ' - '--email-list testing@microsoft.com ' - '--street-address1 "1 MICROSOFT WAY" ' - '--city Redmond ' - '--state-or-province WA ' - '--country US ' - '--postal-code 98052 ' - '--company-name Microsoft ' - '--storage-account {storage_account_1} {storage_account_2} ' - '--staging-storage-account {storage_account_1} ' - '--resource-group-for-managed-disk rg-for-managed-disk', - checks=[JMESPathCheck('status', 'DeviceOrdered')]) - - self.cmd('databox job update ' - '--resource-group {rg} ' - '--name {job_name} ' - '--contact-name "Public SDK Test 1" ' - '--email-list testing1@microsoft.com', - checks=[]) - - self.cmd('databox job show ' - '--resource-group {rg} ' - '--name {job_name}', - checks=[ - JMESPathCheck('name', job_name), - JMESPathCheck('isCancellable', True), - JMESPathCheck('isDeletable', False), - JMESPathCheck('details.contactDetails.contactName', 'Public SDK Test 1'), - JMESPathCheck('details.contactDetails.emailList[0]', 'testing1@microsoft.com')]) - - self.cmd('databox job list ' - '--resource-group {rg}', - checks=[JMESPathCheck('length(@)', 1)]) - - self.cmd('databox job cancel ' - '--resource-group {rg} ' - '--name {job_name} ' - '--reason "CancelTest" ' - '-y', - checks=[]) - - self.cmd('databox job show ' - '--resource-group {rg} ' - '--name {job_name}', - checks=[ - JMESPathCheck('name', job_name), - JMESPathCheck('isCancellable', False), - JMESPathCheck('isDeletable', True)]) - - self.cmd('databox job delete ' - '--resource-group {rg} ' - '--name {job_name} ' - '-y', - checks=[]) - - self.cmd('databox job show ' - '--resource-group {rg} ' - '--name {job_name}', - expect_failure=True) - - # Create another databox job with sku 'DataBoxDisk'. 
- self.cmd('databox job create ' - '--resource-group {rg} ' - '--name {job_name_2} ' - '--location westus ' - '--sku DataBoxDisk ' - '--expected-data-size 1 ' - '--contact-name "Public SDK Test" ' - '--phone 14258828080 ' - '--email-list testing@microsoft.com ' - '--street-address1 "1 MICROSOFT WAY" ' - '--city Redmond ' - '--state-or-province WA ' - '--country US ' - '--postal-code 98052 ' - '--company-name Microsoft ' - '--storage-account {storage_account_1}', - checks=[JMESPathCheck('status', 'DeviceOrdered')]) - - self.cmd('databox job cancel ' - '--resource-group {rg} ' - '--name {job_name_2} ' - '--reason "CancelTest" ' - '-y', - checks=[]) - - self.cmd('databox job delete ' - '--resource-group {rg} ' - '--name {job_name_2} ' - '-y', - checks=[]) - - self.cmd('databox job show ' - '--resource-group {rg} ' - '--name {job_name_2}', - expect_failure=True) - - # DataBox service will create a lock 'DATABOX_SERVICE' on the storage account under the resource group when creating a job. In order to clean up the resource group, we need delete the lock first. - self.cmd('lock delete ' - '--name DATABOX_SERVICE ' - '-g {rg} ' - '--resource-name {storage_account_1} ' - '--resource-type Microsoft.Storage/storageAccounts') - - self.cmd('lock delete ' - '--name DATABOX_SERVICE ' - '-g {rg} ' - '--resource-name {storage_account_2} ' - '--resource-type Microsoft.Storage/storageAccounts') + + @ResourceGroupPreparer(name_prefix='clitestdatabox_databoxbvt'[:7], key='rg_5', parameter_name='rg_5') + @ResourceGroupPreparer(name_prefix='clitestdatabox_databoxbvt1'[:7], key='rg_6', parameter_name='rg_6') + @ResourceGroupPreparer(name_prefix='clitestdatabox_akvenkat'[:7], key='rg_9', parameter_name='rg_9') + @ResourceGroupPreparer(name_prefix='clitestdatabox_SdkRg5154'[:7], key='rg', parameter_name='rg') + @ResourceGroupPreparer(name_prefix='clitestdatabox_SdkRg7937'[:7], key='rg_2', parameter_name='rg_2') + @ResourceGroupPreparer(name_prefix='clitestdatabox_dmstestresource'[:7], key='rg_3', parameter_name='rg_3') + @ResourceGroupPreparer(name_prefix='clitestdatabox_SdkRg8091'[:7], key='rg_4', parameter_name='rg_4') + @ResourceGroupPreparer(name_prefix='clitestdatabox_SdkRg7478'[:7], key='rg_7', parameter_name='rg_7') + @ResourceGroupPreparer(name_prefix='clitestdatabox_SdkRg608'[:7], key='rg_8', parameter_name='rg_8') + @ResourceGroupPreparer(name_prefix='clitestdatabox_SdkRg7552'[:7], key='rg_10', parameter_name='rg_10') + @ResourceGroupPreparer(name_prefix='clitestdatabox_SdkRg9765'[:7], key='rg_11', parameter_name='rg_11') + @ResourceGroupPreparer(name_prefix='clitestdatabox_bvttoolrg6'[:7], key='rg_12', parameter_name='rg_12') + @ResourceGroupPreparer(name_prefix='clitestdatabox_SdkRg9836'[:7], key='rg_13', parameter_name='rg_13') + @ResourceGroupPreparer(name_prefix='clitestdatabox_SdkRg4981'[:7], key='rg_14', parameter_name='rg_14') + @ResourceGroupPreparer(name_prefix='clitestdatabox_SdkRg6861'[:7], key='rg_15', parameter_name='rg_15') + @StorageAccountPreparer(name_prefix='clitestdatabox_databoxbvttestaccount'[:7], key='sa', + resource_group_parameter_name='rg_5') + @StorageAccountPreparer(name_prefix='clitestdatabox_databoxbvttestaccount2'[:7], key='sa_2', + resource_group_parameter_name='rg_6') + @StorageAccountPreparer(name_prefix='clitestdatabox_aaaaaa2'[:7], key='sa_3', + resource_group_parameter_name='rg_9') + def test_databox_Scenario(self, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, + rg_14, rg_15): + call_scenario(self, rg_5, rg_6, rg_9, rg, rg_2, rg_3, 
rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15) + calc_coverage(__file__) + raise_if() + diff --git a/src/databox/azext_databox/vendored_sdks/__init__.py b/src/databox/azext_databox/vendored_sdks/__init__.py index 7183870ee56..c9cfdc73e77 100644 --- a/src/databox/azext_databox/vendored_sdks/__init__.py +++ b/src/databox/azext_databox/vendored_sdks/__init__.py @@ -1,6 +1,12 @@ -# -------------------------------------------------------------------------------------------- +# coding=utf-8 +# -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- -__path__ = __import__('pkgutil').extend_path(__path__, __name__) \ No newline at end of file +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/src/databox/azext_databox/vendored_sdks/databox/_configuration.py b/src/databox/azext_databox/vendored_sdks/databox/_configuration.py index 5b8f07cf262..611b250d000 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/_configuration.py +++ b/src/databox/azext_databox/vendored_sdks/databox/_configuration.py @@ -1,21 +1,24 @@ # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# +# Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any + +from typing import TYPE_CHECKING from azure.core.configuration import Configuration from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMHttpLoggingPolicy -from ._version import VERSION +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any + + from azure.core.credentials import TokenCredential +VERSION = "unknown" class DataBoxManagementClientConfiguration(Configuration): """Configuration for DataBoxManagementClient. 
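For orientation, a minimal sketch of the regenerated-test pattern used in the scenario test above, with hypothetical resource and job names; it assumes only azure.cli.testsdk. Each preparer writes the name of the resource it creates into self.kwargs under its key, and self.cmd() substitutes those kwargs into the command string before it runs.

from azure.cli.testsdk import ScenarioTest, ResourceGroupPreparer


class DataboxSketchTest(ScenarioTest):

    def __init__(self, *args, **kwargs):
        super(DataboxSketchTest, self).__init__(*args, **kwargs)
        # Static values referenced as '{myJob}' in command strings below.
        self.kwargs.update({'myJob': 'SdkJobExample'})

    @ResourceGroupPreparer(name_prefix='clitest', key='rg')
    def test_databox_sketch(self, resource_group):
        # The preparer stored the generated group name under key 'rg', so
        # '{rg}' and '{myJob}' are resolved from self.kwargs at run time.
        # StorageAccountPreparer(key='sa', ...) follows the same pattern.
        self.cmd('az databox job show --name "{myJob}" --resource-group "{rg}"',
                 checks=[self.check('name', '{myJob}', case_sensitive=False)])
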
@@ -44,8 +47,9 @@ def __init__( self.credential = credential self.subscription_id = subscription_id + self.api_version = "2021-03-01" self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) - kwargs.setdefault('sdk_moniker', 'azure-mgmt-databox/{}'.format(VERSION)) + kwargs.setdefault('sdk_moniker', 'databoxmanagementclient/{}'.format(VERSION)) self._configure(**kwargs) def _configure( diff --git a/src/databox/azext_databox/vendored_sdks/databox/_data_box_management_client.py b/src/databox/azext_databox/vendored_sdks/databox/_data_box_management_client.py index aa0fa23980a..7821ea5ad3b 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/_data_box_management_client.py +++ b/src/databox/azext_databox/vendored_sdks/databox/_data_box_management_client.py @@ -1,175 +1,80 @@ # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# +# Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from azure.mgmt.core import ARMPipelineClient -from msrest import Serializer, Deserializer - -from azure.profiles import KnownProfiles, ProfileDefinition -from azure.profiles.multiapiclient import MultiApiClientMixin -from ._configuration import DataBoxManagementClientConfiguration +from typing import TYPE_CHECKING -class _SDKClient(object): - def __init__(self, *args, **kwargs): - """This is a fake class to support current implemetation of MultiApiClientMixin." - Will be removed in final version of multiapi azure-core based client - """ - pass +from azure.mgmt.core import ARMPipelineClient +from msrest import Deserializer, Serializer -class DataBoxManagementClient(MultiApiClientMixin, _SDKClient): - """The DataBox Client. +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Optional - This ready contains multiple API versions, to help you deal with all of the Azure clouds - (Azure Stack, Azure Government, Azure China, etc.). - By default, it uses the latest API version available on public Azure. - For production, you should stick to a particular api-version and/or profile. - The profile sets a mapping between an operation group and its API version. - The api-version parameter sets the default API version if the operation - group is not described in the profile. + from azure.core.credentials import TokenCredential +from ._configuration import DataBoxManagementClientConfiguration +from .operations import Operations +from .operations import JobsOperations +from .operations import DataBoxManagementClientOperationsMixin +from .operations import ServiceOperations +from . import models + + +class DataBoxManagementClient(DataBoxManagementClientOperationsMixin): + """DataBoxManagementClient. 
+ + :ivar operations: Operations operations + :vartype operations: data_box_management_client.operations.Operations + :ivar jobs: JobsOperations operations + :vartype jobs: data_box_management_client.operations.JobsOperations + :ivar service: ServiceOperations operations + :vartype service: data_box_management_client.operations.ServiceOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: The Subscription Id. :type subscription_id: str - :param str api_version: API version to use if no profile is provided, or if - missing in profile. :param str base_url: Service URL - :param profile: A profile definition, from KnownProfiles to dict. - :type profile: azure.profiles.KnownProfiles :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. """ - DEFAULT_API_VERSION = '2019-09-01' - _PROFILE_TAG = "azure.mgmt.databox.DataBoxManagementClient" - LATEST_PROFILE = ProfileDefinition({ - _PROFILE_TAG: { - None: DEFAULT_API_VERSION, - }}, - _PROFILE_TAG + " latest" - ) - def __init__( self, credential, # type: "TokenCredential" subscription_id, # type: str - api_version=None, - base_url=None, - profile=KnownProfiles.default, + base_url=None, # type: Optional[str] **kwargs # type: Any ): + # type: (...) -> None if not base_url: base_url = 'https://management.azure.com' self._config = DataBoxManagementClientConfiguration(credential, subscription_id, **kwargs) self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) - super(DataBoxManagementClient, self).__init__( - api_version=api_version, - profile=profile - ) - - @classmethod - def _models_dict(cls, api_version): - return {k: v for k, v in cls.models(api_version).__dict__.items() if isinstance(v, type)} - - @classmethod - def models(cls, api_version=DEFAULT_API_VERSION): - """Module depends on the API version: - * 2018-01-01: :mod:`v2018_01_01.models` - * 2019-09-01: :mod:`v2019_09_01.models` - * 2020-04-01: :mod:`v2020_04_01.models` - * 2020-11-01: :mod:`v2020_11_01.models` - """ - if api_version == '2018-01-01': - from .v2018_01_01 import models - return models - elif api_version == '2019-09-01': - from .v2019_09_01 import models - return models - elif api_version == '2020-04-01': - from .v2020_04_01 import models - return models - elif api_version == '2020-11-01': - from .v2020_11_01 import models - return models - raise ValueError("API version {} is not available".format(api_version)) + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) - @property - def jobs(self): - """Instance depends on the API version: - - * 2018-01-01: :class:`JobsOperations` - * 2019-09-01: :class:`JobsOperations` - * 2020-04-01: :class:`JobsOperations` - * 2020-11-01: :class:`JobsOperations` - """ - api_version = self._get_api_version('jobs') - if api_version == '2018-01-01': - from .v2018_01_01.operations import JobsOperations as OperationClass - elif api_version == '2019-09-01': - from .v2019_09_01.operations import JobsOperations as OperationClass - elif api_version == '2020-04-01': - from .v2020_04_01.operations import JobsOperations as OperationClass - elif api_version == '2020-11-01': - from .v2020_11_01.operations import JobsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 
'jobs'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def operations(self): - """Instance depends on the API version: - - * 2018-01-01: :class:`Operations` - * 2019-09-01: :class:`Operations` - * 2020-04-01: :class:`Operations` - * 2020-11-01: :class:`Operations` - """ - api_version = self._get_api_version('operations') - if api_version == '2018-01-01': - from .v2018_01_01.operations import Operations as OperationClass - elif api_version == '2019-09-01': - from .v2019_09_01.operations import Operations as OperationClass - elif api_version == '2020-04-01': - from .v2020_04_01.operations import Operations as OperationClass - elif api_version == '2020-11-01': - from .v2020_11_01.operations import Operations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'operations'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def service(self): - """Instance depends on the API version: - - * 2018-01-01: :class:`ServiceOperations` - * 2019-09-01: :class:`ServiceOperations` - * 2020-04-01: :class:`ServiceOperations` - * 2020-11-01: :class:`ServiceOperations` - """ - api_version = self._get_api_version('service') - if api_version == '2018-01-01': - from .v2018_01_01.operations import ServiceOperations as OperationClass - elif api_version == '2019-09-01': - from .v2019_09_01.operations import ServiceOperations as OperationClass - elif api_version == '2020-04-01': - from .v2020_04_01.operations import ServiceOperations as OperationClass - elif api_version == '2020-11-01': - from .v2020_11_01.operations import ServiceOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'service'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) + self.operations = Operations( + self._client, self._config, self._serialize, self._deserialize) + self.jobs = JobsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.service = ServiceOperations( + self._client, self._config, self._serialize, self._deserialize) def close(self): + # type: () -> None self._client.close() + def __enter__(self): + # type: () -> DataBoxManagementClient self._client.__enter__() return self + def __exit__(self, *exc_details): + # type: (Any) -> None self._client.__exit__(*exc_details) diff --git a/src/databox/azext_databox/vendored_sdks/databox/aio/_configuration.py b/src/databox/azext_databox/vendored_sdks/databox/aio/_configuration.py index 71fd4699f32..6e61d68ca3c 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/aio/_configuration.py +++ b/src/databox/azext_databox/vendored_sdks/databox/aio/_configuration.py @@ -1,21 +1,22 @@ # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# +# Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any + +from typing import Any, TYPE_CHECKING from azure.core.configuration import Configuration from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMHttpLoggingPolicy -from .._version import VERSION +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential +VERSION = "unknown" class DataBoxManagementClientConfiguration(Configuration): """Configuration for DataBoxManagementClient. @@ -31,9 +32,9 @@ class DataBoxManagementClientConfiguration(Configuration): def __init__( self, - credential, # type: "AsyncTokenCredential" - subscription_id, # type: str - **kwargs # type: Any + credential: "AsyncTokenCredential", + subscription_id: str, + **kwargs: Any ) -> None: if credential is None: raise ValueError("Parameter 'credential' must not be None.") @@ -43,8 +44,9 @@ def __init__( self.credential = credential self.subscription_id = subscription_id + self.api_version = "2021-03-01" self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) - kwargs.setdefault('sdk_moniker', 'azure-mgmt-databox/{}'.format(VERSION)) + kwargs.setdefault('sdk_moniker', 'databoxmanagementclient/{}'.format(VERSION)) self._configure(**kwargs) def _configure( diff --git a/src/databox/azext_databox/vendored_sdks/databox/aio/_data_box_management_client.py b/src/databox/azext_databox/vendored_sdks/databox/aio/_data_box_management_client.py index 3d3baff1574..a8aea887805 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/aio/_data_box_management_client.py +++ b/src/databox/azext_databox/vendored_sdks/databox/aio/_data_box_management_client.py @@ -1,175 +1,74 @@ # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# +# Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from azure.mgmt.core import AsyncARMPipelineClient -from msrest import Serializer, Deserializer - -from azure.profiles import KnownProfiles, ProfileDefinition -from azure.profiles.multiapiclient import MultiApiClientMixin -from ._configuration import DataBoxManagementClientConfiguration - -class _SDKClient(object): - def __init__(self, *args, **kwargs): - """This is a fake class to support current implemetation of MultiApiClientMixin." - Will be removed in final version of multiapi azure-core based client - """ - pass +from typing import Any, Optional, TYPE_CHECKING -class DataBoxManagementClient(MultiApiClientMixin, _SDKClient): - """The DataBox Client. +from azure.mgmt.core import AsyncARMPipelineClient +from msrest import Deserializer, Serializer - This ready contains multiple API versions, to help you deal with all of the Azure clouds - (Azure Stack, Azure Government, Azure China, etc.). - By default, it uses the latest API version available on public Azure. 
- For production, you should stick to a particular api-version and/or profile. - The profile sets a mapping between an operation group and its API version. - The api-version parameter sets the default API version if the operation - group is not described in the profile. +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential +from ._configuration import DataBoxManagementClientConfiguration +from .operations import Operations +from .operations import JobsOperations +from .operations import DataBoxManagementClientOperationsMixin +from .operations import ServiceOperations +from .. import models + + +class DataBoxManagementClient(DataBoxManagementClientOperationsMixin): + """DataBoxManagementClient. + + :ivar operations: Operations operations + :vartype operations: data_box_management_client.aio.operations.Operations + :ivar jobs: JobsOperations operations + :vartype jobs: data_box_management_client.aio.operations.JobsOperations + :ivar service: ServiceOperations operations + :vartype service: data_box_management_client.aio.operations.ServiceOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The Subscription Id. :type subscription_id: str - :param str api_version: API version to use if no profile is provided, or if - missing in profile. :param str base_url: Service URL - :param profile: A profile definition, from KnownProfiles to dict. - :type profile: azure.profiles.KnownProfiles :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. """ - DEFAULT_API_VERSION = '2020-11-01' - _PROFILE_TAG = "azure.mgmt.databox.DataBoxManagementClient" - LATEST_PROFILE = ProfileDefinition({ - _PROFILE_TAG: { - None: DEFAULT_API_VERSION, - }}, - _PROFILE_TAG + " latest" - ) - def __init__( self, - credential, # type: "AsyncTokenCredential" - subscription_id, # type: str - api_version=None, - base_url=None, - profile=KnownProfiles.default, - **kwargs # type: Any + credential: "AsyncTokenCredential", + subscription_id: str, + base_url: Optional[str] = None, + **kwargs: Any ) -> None: if not base_url: base_url = 'https://management.azure.com' self._config = DataBoxManagementClientConfiguration(credential, subscription_id, **kwargs) self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) - super(DataBoxManagementClient, self).__init__( - api_version=api_version, - profile=profile - ) - - @classmethod - def _models_dict(cls, api_version): - return {k: v for k, v in cls.models(api_version).__dict__.items() if isinstance(v, type)} - @classmethod - def models(cls, api_version=DEFAULT_API_VERSION): - """Module depends on the API version: + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) - * 2018-01-01: :mod:`v2018_01_01.models` - * 2019-09-01: :mod:`v2019_09_01.models` - * 2020-04-01: :mod:`v2020_04_01.models` - * 2020-11-01: :mod:`v2020_11_01.models` - """ - if api_version == '2018-01-01': - from ..v2018_01_01 import models - return models - elif api_version == '2019-09-01': - from ..v2019_09_01 import models - return models - elif api_version == '2020-04-01': - from ..v2020_04_01 import models - return models - elif api_version == '2020-11-01': - from ..v2020_11_01 import models - return 
models - raise ValueError("API version {} is not available".format(api_version)) + self.operations = Operations( + self._client, self._config, self._serialize, self._deserialize) + self.jobs = JobsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.service = ServiceOperations( + self._client, self._config, self._serialize, self._deserialize) - @property - def jobs(self): - """Instance depends on the API version: - - * 2018-01-01: :class:`JobsOperations` - * 2019-09-01: :class:`JobsOperations` - * 2020-04-01: :class:`JobsOperations` - * 2020-11-01: :class:`JobsOperations` - """ - api_version = self._get_api_version('jobs') - if api_version == '2018-01-01': - from ..v2018_01_01.aio.operations import JobsOperations as OperationClass - elif api_version == '2019-09-01': - from ..v2019_09_01.aio.operations import JobsOperations as OperationClass - elif api_version == '2020-04-01': - from ..v2020_04_01.aio.operations import JobsOperations as OperationClass - elif api_version == '2020-11-01': - from ..v2020_11_01.aio.operations import JobsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'jobs'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def operations(self): - """Instance depends on the API version: - - * 2018-01-01: :class:`Operations` - * 2019-09-01: :class:`Operations` - * 2020-04-01: :class:`Operations` - * 2020-11-01: :class:`Operations` - """ - api_version = self._get_api_version('operations') - if api_version == '2018-01-01': - from ..v2018_01_01.aio.operations import Operations as OperationClass - elif api_version == '2019-09-01': - from ..v2019_09_01.aio.operations import Operations as OperationClass - elif api_version == '2020-04-01': - from ..v2020_04_01.aio.operations import Operations as OperationClass - elif api_version == '2020-11-01': - from ..v2020_11_01.aio.operations import Operations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'operations'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def service(self): - """Instance depends on the API version: - - * 2018-01-01: :class:`ServiceOperations` - * 2019-09-01: :class:`ServiceOperations` - * 2020-04-01: :class:`ServiceOperations` - * 2020-11-01: :class:`ServiceOperations` - """ - api_version = self._get_api_version('service') - if api_version == '2018-01-01': - from ..v2018_01_01.aio.operations import ServiceOperations as OperationClass - elif api_version == '2019-09-01': - from ..v2019_09_01.aio.operations import ServiceOperations as OperationClass - elif api_version == '2020-04-01': - from ..v2020_04_01.aio.operations import ServiceOperations as OperationClass - elif api_version == '2020-11-01': - from ..v2020_11_01.aio.operations import ServiceOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'service'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - async def close(self): + async def close(self) -> None: await self._client.close() - async def __aenter__(self): + + async def __aenter__(self) -> "DataBoxManagementClient": await self._client.__aenter__() return self - 
async def __aexit__(self, *exc_details): + + async def __aexit__(self, *exc_details) -> None: await self._client.__aexit__(*exc_details) diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/operations/__init__.py b/src/databox/azext_databox/vendored_sdks/databox/aio/operations/__init__.py similarity index 83% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/operations/__init__.py rename to src/databox/azext_databox/vendored_sdks/databox/aio/operations/__init__.py index 9c8fa7a8253..bd13cc67afb 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/operations/__init__.py +++ b/src/databox/azext_databox/vendored_sdks/databox/aio/operations/__init__.py @@ -8,10 +8,12 @@ from ._operations import Operations from ._jobs_operations import JobsOperations +from ._data_box_management_client_operations import DataBoxManagementClientOperationsMixin from ._service_operations import ServiceOperations __all__ = [ 'Operations', 'JobsOperations', + 'DataBoxManagementClientOperationsMixin', 'ServiceOperations', ] diff --git a/src/databox/azext_databox/vendored_sdks/databox/aio/operations/_data_box_management_client_operations.py b/src/databox/azext_databox/vendored_sdks/databox/aio/operations/_data_box_management_client_operations.py new file mode 100644 index 00000000000..a3cbda7da6e --- /dev/null +++ b/src/databox/azext_databox/vendored_sdks/databox/aio/operations/_data_box_management_client_operations.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class DataBoxManagementClientOperationsMixin: + + async def mitigate( + self, + job_name: str, + resource_group_name: str, + mitigate_job_request: "models.MitigateJobRequest", + **kwargs + ) -> None: + """Request to mitigate for a given job. + + :param job_name: The name of the job Resource within the specified resource group. job names + must be between 3 and 24 characters in length and use any alphanumeric and underscore only. + :type job_name: str + :param resource_group_name: The Resource Group Name. + :type resource_group_name: str + :param mitigate_job_request: Mitigation Request. 
+ :type mitigate_job_request: ~data_box_management_client.models.MitigateJobRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-03-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.mitigate.metadata['url'] # type: ignore + path_format_arguments = { + 'jobName': self._serialize.url("job_name", job_name, 'str', max_length=24, min_length=3, pattern=r'^[-\w\.]+$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(mitigate_job_request, 'MitigateJobRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + mitigate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBox/jobs/{jobName}/mitigate'} # type: ignore diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/operations/_jobs_operations.py b/src/databox/azext_databox/vendored_sdks/databox/aio/operations/_jobs_operations.py similarity index 89% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/operations/_jobs_operations.py rename to src/databox/azext_databox/vendored_sdks/databox/aio/operations/_jobs_operations.py index f01a52d0a1a..898ad7513d3 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/operations/_jobs_operations.py +++ b/src/databox/azext_databox/vendored_sdks/databox/aio/operations/_jobs_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling -from ... import models as _models +from ... import models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -28,14 +28,14 @@ class JobsOperations: instantiates it for you and attaches it as an attribute. 
:ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.databox.models + :type models: ~data_box_management_client.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ - models = _models + models = models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -47,7 +47,7 @@ def list( self, skip_token: Optional[str] = None, **kwargs - ) -> AsyncIterable["_models.JobResourceList"]: + ) -> AsyncIterable["models.JobResourceList"]: """Lists all the jobs available under the subscription. :param skip_token: $skipToken is supported on Get list of jobs, which provides the next page in @@ -55,15 +55,15 @@ def list( :type skip_token: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either JobResourceList or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databox.models.JobResourceList] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_box_management_client.models.JobResourceList] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResourceList"] + cls = kwargs.pop('cls', None) # type: ClsType["models.JobResourceList"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -105,8 +105,9 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -120,7 +121,7 @@ def list_by_resource_group( resource_group_name: str, skip_token: Optional[str] = None, **kwargs - ) -> AsyncIterable["_models.JobResourceList"]: + ) -> AsyncIterable["models.JobResourceList"]: """Lists all the jobs available under the given resource group. :param resource_group_name: The Resource Group Name. 
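The list operations above return async pagers; a usage sketch, not part of this diff, assuming azure-identity for the credential and the usual track2 layout in which the aio package re-exports the client (subscription and resource names are placeholders).

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azext_databox.vendored_sdks.databox.aio import DataBoxManagementClient


async def print_jobs(subscription_id, resource_group):
    credential = DefaultAzureCredential()
    async with DataBoxManagementClient(credential, subscription_id) as client:
        # Iterating the pager transparently follows the service's nextLink;
        # each item is a deserialized JobResource.
        async for job in client.jobs.list_by_resource_group(resource_group):
            print(job.name)
    await credential.close()


# asyncio.run(print_jobs('<subscription-id>', '<resource-group>'))
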
@@ -130,15 +131,15 @@ def list_by_resource_group( :type skip_token: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either JobResourceList or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databox.models.JobResourceList] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_box_management_client.models.JobResourceList] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResourceList"] + cls = kwargs.pop('cls', None) # type: ClsType["models.JobResourceList"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -181,8 +182,9 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -197,7 +199,7 @@ async def get( job_name: str, expand: Optional[str] = None, **kwargs - ) -> "_models.JobResource": + ) -> "models.JobResource": """Gets information about the specified job. :param resource_group_name: The Resource Group Name. @@ -210,15 +212,15 @@ async def get( :type expand: str :keyword callable cls: A custom type or function that will be passed the direct response :return: JobResource, or the result of cls(response) - :rtype: ~azure.mgmt.databox.models.JobResource + :rtype: ~data_box_management_client.models.JobResource :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResource"] + cls = kwargs.pop('cls', None) # type: ClsType["models.JobResource"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" accept = "application/json" # Construct URL @@ -246,7 +248,8 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('JobResource', pipeline_response) @@ -260,15 +263,15 @@ async def _create_initial( self, resource_group_name: str, job_name: str, - job_resource: "_models.JobResource", + job_resource: "models.JobResource", **kwargs - ) -> Optional["_models.JobResource"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.JobResource"]] + ) -> Optional["models.JobResource"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.JobResource"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -299,7 +302,8 @@ 
async def _create_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: @@ -315,9 +319,9 @@ async def begin_create( self, resource_group_name: str, job_name: str, - job_resource: "_models.JobResource", + job_resource: "models.JobResource", **kwargs - ) -> AsyncLROPoller["_models.JobResource"]: + ) -> AsyncLROPoller["models.JobResource"]: """Creates a new job with the specified parameters. Existing job cannot be updated with this API and should instead be updated with the Update job API. @@ -327,7 +331,7 @@ async def begin_create( must be between 3 and 24 characters in length and use any alphanumeric and underscore only. :type job_name: str :param job_resource: Job details from request body. - :type job_resource: ~azure.mgmt.databox.models.JobResource + :type job_resource: ~data_box_management_client.models.JobResource :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a @@ -335,11 +339,11 @@ async def begin_create( :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either JobResource or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databox.models.JobResource] + :rtype: ~azure.core.polling.AsyncLROPoller[~data_box_management_client.models.JobResource] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResource"] + cls = kwargs.pop('cls', None) # type: ClsType["models.JobResource"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -395,7 +399,7 @@ async def _delete_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" accept = "application/json" # Construct URL @@ -419,9 +423,10 @@ async def _delete_initial( pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [202, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) @@ -497,16 +502,16 @@ async def _update_initial( self, resource_group_name: str, job_name: str, - job_resource_update_parameter: "_models.JobResourceUpdateParameter", + job_resource_update_parameter: "models.JobResourceUpdateParameter", if_match: Optional[str] = None, **kwargs - ) -> Optional["_models.JobResource"]: - cls = kwargs.pop('cls', None) # type: 
ClsType[Optional["_models.JobResource"]] + ) -> Optional["models.JobResource"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.JobResource"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -539,7 +544,8 @@ async def _update_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: @@ -555,10 +561,10 @@ async def begin_update( self, resource_group_name: str, job_name: str, - job_resource_update_parameter: "_models.JobResourceUpdateParameter", + job_resource_update_parameter: "models.JobResourceUpdateParameter", if_match: Optional[str] = None, **kwargs - ) -> AsyncLROPoller["_models.JobResource"]: + ) -> AsyncLROPoller["models.JobResource"]: """Updates the properties of an existing job. :param resource_group_name: The Resource Group Name. @@ -567,7 +573,7 @@ async def begin_update( must be between 3 and 24 characters in length and use any alphanumeric and underscore only. :type job_name: str :param job_resource_update_parameter: Job update parameters from request body. - :type job_resource_update_parameter: ~azure.mgmt.databox.models.JobResourceUpdateParameter + :type job_resource_update_parameter: ~data_box_management_client.models.JobResourceUpdateParameter :param if_match: Defines the If-Match condition. The patch will be performed only if the ETag of the job on the server matches this value. :type if_match: str @@ -578,11 +584,11 @@ async def begin_update( :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either JobResource or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databox.models.JobResource] + :rtype: ~azure.core.polling.AsyncLROPoller[~data_box_management_client.models.JobResource] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResource"] + cls = kwargs.pop('cls', None) # type: ClsType["models.JobResource"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -632,9 +638,9 @@ async def book_shipment_pick_up( self, resource_group_name: str, job_name: str, - shipment_pick_up_request: "_models.ShipmentPickUpRequest", + shipment_pick_up_request: "models.ShipmentPickUpRequest", **kwargs - ) -> "_models.ShipmentPickUpResponse": + ) -> "models.ShipmentPickUpResponse": """Book shipment pick up. :param resource_group_name: The Resource Group Name. @@ -643,18 +649,18 @@ async def book_shipment_pick_up( must be between 3 and 24 characters in length and use any alphanumeric and underscore only. :type job_name: str :param shipment_pick_up_request: Details of shipment pick up request. 
- :type shipment_pick_up_request: ~azure.mgmt.databox.models.ShipmentPickUpRequest + :type shipment_pick_up_request: ~data_box_management_client.models.ShipmentPickUpRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ShipmentPickUpResponse, or the result of cls(response) - :rtype: ~azure.mgmt.databox.models.ShipmentPickUpResponse + :rtype: ~data_box_management_client.models.ShipmentPickUpResponse :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ShipmentPickUpResponse"] + cls = kwargs.pop('cls', None) # type: ClsType["models.ShipmentPickUpResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -685,7 +691,8 @@ async def book_shipment_pick_up( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ShipmentPickUpResponse', pipeline_response) @@ -699,7 +706,7 @@ async def cancel( self, resource_group_name: str, job_name: str, - cancellation_reason: "_models.CancellationReason", + cancellation_reason: "models.CancellationReason", **kwargs ) -> None: """CancelJob. @@ -710,7 +717,7 @@ async def cancel( must be between 3 and 24 characters in length and use any alphanumeric and underscore only. :type job_name: str :param cancellation_reason: Reason for cancellation. - :type cancellation_reason: ~azure.mgmt.databox.models.CancellationReason + :type cancellation_reason: ~data_box_management_client.models.CancellationReason :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None @@ -721,7 +728,7 @@ async def cancel( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -752,7 +759,8 @@ async def cancel( if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) @@ -764,7 +772,7 @@ def list_credentials( resource_group_name: str, job_name: str, **kwargs - ) -> AsyncIterable["_models.UnencryptedCredentialsList"]: + ) -> AsyncIterable["models.UnencryptedCredentialsList"]: """This method gets the unencrypted secrets related to the job. :param resource_group_name: The Resource Group Name. 
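The recurring change in these hunks is that failures now deserialize the response body into models.ApiError and attach it to the raised exception. A hedged sketch of what that means for a caller of the async client; the helper and variable names are illustrative.

from azure.core.exceptions import HttpResponseError


async def show_job(client, resource_group, job_name):
    try:
        return await client.jobs.get(resource_group, job_name)
    except HttpResponseError as exc:
        # Because the operation raises with model=error, the deserialized
        # ApiError is available on exc.model; its exact fields come from the
        # service response.
        print('databox job get failed:', exc.message)
        if exc.model is not None:
            print('service error payload:', exc.model)
        raise
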
@@ -774,15 +782,15 @@ def list_credentials( :type job_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either UnencryptedCredentialsList or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databox.models.UnencryptedCredentialsList] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_box_management_client.models.UnencryptedCredentialsList] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.UnencryptedCredentialsList"] + cls = kwargs.pop('cls', None) # type: ClsType["models.UnencryptedCredentialsList"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -824,8 +832,9 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/operations/_operations.py b/src/databox/azext_databox/vendored_sdks/databox/aio/operations/_operations.py similarity index 88% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/operations/_operations.py rename to src/databox/azext_databox/vendored_sdks/databox/aio/operations/_operations.py index bd99838be8c..3107884ca08 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/operations/_operations.py +++ b/src/databox/azext_databox/vendored_sdks/databox/aio/operations/_operations.py @@ -14,7 +14,7 @@ from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat -from ... import models as _models +from ... import models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -26,14 +26,14 @@ class Operations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.databox.models + :type models: ~data_box_management_client.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ - models = _models + models = models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -44,20 +44,20 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, **kwargs - ) -> AsyncIterable["_models.OperationList"]: + ) -> AsyncIterable["models.OperationList"]: """This method gets all the operations. 
:keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either OperationList or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databox.models.OperationList] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_box_management_client.models.OperationList] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationList"] + cls = kwargs.pop('cls', None) # type: ClsType["models.OperationList"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -93,8 +93,9 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/operations/_service_operations.py b/src/databox/azext_databox/vendored_sdks/databox/aio/operations/_service_operations.py similarity index 73% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/operations/_service_operations.py rename to src/databox/azext_databox/vendored_sdks/databox/aio/operations/_service_operations.py index a07ce55b508..ee3f05a9ef6 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/operations/_service_operations.py +++ b/src/databox/azext_databox/vendored_sdks/databox/aio/operations/_service_operations.py @@ -14,7 +14,7 @@ from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat -from ... import models as _models +from ... import models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -26,14 +26,14 @@ class ServiceOperations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.databox.models + :type models: ~data_box_management_client.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ - models = _models + models = models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -41,94 +41,13 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config - def list_available_skus( - self, - location: str, - available_sku_request: "_models.AvailableSkuRequest", - **kwargs - ) -> AsyncIterable["_models.AvailableSkusResult"]: - """This method provides the list of available skus for the given subscription and location. - - :param location: The location of the resource. - :type location: str - :param available_sku_request: Filters for showing the available skus. 
- :type available_sku_request: ~azure.mgmt.databox.models.AvailableSkuRequest - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either AvailableSkusResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databox.models.AvailableSkusResult] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.AvailableSkusResult"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" - content_type = "application/json" - accept = "application/json" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - if not next_link: - # Construct URL - url = self.list_available_skus.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'location': self._serialize.url("location", location, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(available_sku_request, 'AvailableSkuRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(available_sku_request, 'AvailableSkuRequest') - body_content_kwargs['content'] = body_content - request = self._client.get(url, query_parameters, header_parameters, **body_content_kwargs) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('AvailableSkusResult', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_available_skus.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataBox/locations/{location}/availableSkus'} # type: ignore - def list_available_skus_by_resource_group( self, resource_group_name: str, location: str, - available_sku_request: "_models.AvailableSkuRequest", + available_sku_request: "models.AvailableSkuRequest", **kwargs - ) -> AsyncIterable["_models.AvailableSkusResult"]: + ) -> AsyncIterable["models.AvailableSkusResult"]: """This method provides the list of available skus for the given subscription, resource 
group and location. @@ -137,18 +56,18 @@ def list_available_skus_by_resource_group( :param location: The location of the resource. :type location: str :param available_sku_request: Filters for showing the available skus. - :type available_sku_request: ~azure.mgmt.databox.models.AvailableSkuRequest + :type available_sku_request: ~data_box_management_client.models.AvailableSkuRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either AvailableSkusResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databox.models.AvailableSkusResult] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_box_management_client.models.AvailableSkusResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.AvailableSkusResult"] + cls = kwargs.pop('cls', None) # type: ClsType["models.AvailableSkusResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" content_type = "application/json" accept = "application/json" @@ -198,8 +117,9 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -211,27 +131,27 @@ async def get_next(next_link=None): async def validate_address( self, location: str, - validate_address: "_models.ValidateAddress", + validate_address: "models.ValidateAddress", **kwargs - ) -> "_models.AddressValidationOutput": - """[DEPRECATED NOTICE: This operation will soon be removed] This method validates the customer + ) -> "models.AddressValidationOutput": + """[DEPRECATED NOTICE: This operation will soon be removed]. This method validates the customer shipping address and provide alternate addresses if any. :param location: The location of the resource. :type location: str :param validate_address: Shipping address of the customer. 
- :type validate_address: ~azure.mgmt.databox.models.ValidateAddress + :type validate_address: ~data_box_management_client.models.ValidateAddress :keyword callable cls: A custom type or function that will be passed the direct response :return: AddressValidationOutput, or the result of cls(response) - :rtype: ~azure.mgmt.databox.models.AddressValidationOutput + :rtype: ~data_box_management_client.models.AddressValidationOutput :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.AddressValidationOutput"] + cls = kwargs.pop('cls', None) # type: ClsType["models.AddressValidationOutput"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -261,7 +181,8 @@ async def validate_address( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('AddressValidationOutput', pipeline_response) @@ -275,9 +196,9 @@ async def validate_inputs_by_resource_group( self, resource_group_name: str, location: str, - validation_request: "_models.ValidationRequest", + validation_request: "models.ValidationRequest", **kwargs - ) -> "_models.ValidationResponse": + ) -> "models.ValidationResponse": """This method does all necessary pre-job creation validation under resource group. :param resource_group_name: The Resource Group Name. @@ -285,18 +206,18 @@ async def validate_inputs_by_resource_group( :param location: The location of the resource. :type location: str :param validation_request: Inputs of the customer. 
- :type validation_request: ~azure.mgmt.databox.models.ValidationRequest + :type validation_request: ~data_box_management_client.models.ValidationRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ValidationResponse, or the result of cls(response) - :rtype: ~azure.mgmt.databox.models.ValidationResponse + :rtype: ~data_box_management_client.models.ValidationResponse :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ValidationResponse"] + cls = kwargs.pop('cls', None) # type: ClsType["models.ValidationResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -327,7 +248,8 @@ async def validate_inputs_by_resource_group( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ValidationResponse', pipeline_response) @@ -340,26 +262,26 @@ async def validate_inputs_by_resource_group( async def validate_inputs( self, location: str, - validation_request: "_models.ValidationRequest", + validation_request: "models.ValidationRequest", **kwargs - ) -> "_models.ValidationResponse": + ) -> "models.ValidationResponse": """This method does all necessary pre-job creation validation under subscription. :param location: The location of the resource. :type location: str :param validation_request: Inputs of the customer. 
- :type validation_request: ~azure.mgmt.databox.models.ValidationRequest + :type validation_request: ~data_box_management_client.models.ValidationRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ValidationResponse, or the result of cls(response) - :rtype: ~azure.mgmt.databox.models.ValidationResponse + :rtype: ~data_box_management_client.models.ValidationResponse :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ValidationResponse"] + cls = kwargs.pop('cls', None) # type: ClsType["models.ValidationResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -389,7 +311,8 @@ async def validate_inputs( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ValidationResponse', pipeline_response) @@ -402,26 +325,27 @@ async def validate_inputs( async def region_configuration( self, location: str, - region_configuration_request: "_models.RegionConfigurationRequest", + region_configuration_request: "models.RegionConfigurationRequest", **kwargs - ) -> "_models.RegionConfigurationResponse": - """This API provides configuration details specific to given region/location. + ) -> "models.RegionConfigurationResponse": + """This API provides configuration details specific to given region/location at Subscription + level. :param location: The location of the resource. :type location: str :param region_configuration_request: Request body to get the configuration for the region. 
- :type region_configuration_request: ~azure.mgmt.databox.models.RegionConfigurationRequest + :type region_configuration_request: ~data_box_management_client.models.RegionConfigurationRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: RegionConfigurationResponse, or the result of cls(response) - :rtype: ~azure.mgmt.databox.models.RegionConfigurationResponse + :rtype: ~data_box_management_client.models.RegionConfigurationResponse :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.RegionConfigurationResponse"] + cls = kwargs.pop('cls', None) # type: ClsType["models.RegionConfigurationResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -451,7 +375,8 @@ async def region_configuration( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('RegionConfigurationResponse', pipeline_response) @@ -460,3 +385,72 @@ async def region_configuration( return deserialized region_configuration.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataBox/locations/{location}/regionConfiguration'} # type: ignore + + async def region_configuration_by_resource_group( + self, + resource_group_name: str, + location: str, + region_configuration_request: "models.RegionConfigurationRequest", + **kwargs + ) -> "models.RegionConfigurationResponse": + """This API provides configuration details specific to given region/location at Resource group + level. + + :param resource_group_name: The Resource Group Name. + :type resource_group_name: str + :param location: The location of the resource. + :type location: str + :param region_configuration_request: Request body to get the configuration for the region at + resource group level. 
+ :type region_configuration_request: ~data_box_management_client.models.RegionConfigurationRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: RegionConfigurationResponse, or the result of cls(response) + :rtype: ~data_box_management_client.models.RegionConfigurationResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.RegionConfigurationResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-03-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.region_configuration_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'location': self._serialize.url("location", location, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(region_configuration_request, 'RegionConfigurationRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('RegionConfigurationResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + region_configuration_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBox/locations/{location}/regionConfiguration'} # type: ignore diff --git a/src/databox/azext_databox/vendored_sdks/databox/models.py b/src/databox/azext_databox/vendored_sdks/databox/models.py deleted file mode 100644 index 1d5d79558e5..00000000000 --- a/src/databox/azext_databox/vendored_sdks/databox/models.py +++ /dev/null @@ -1,7 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -------------------------------------------------------------------------- -from .v2019_09_01.models import * diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/models/__init__.py b/src/databox/azext_databox/vendored_sdks/databox/models/__init__.py similarity index 73% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/models/__init__.py rename to src/databox/azext_databox/vendored_sdks/databox/models/__init__.py index 84b34d519a6..6fab328348a 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/models/__init__.py +++ b/src/databox/azext_databox/vendored_sdks/databox/models/__init__.py @@ -8,12 +8,16 @@ try: from ._models_py3 import AccountCredentialDetails + from ._models_py3 import AdditionalErrorInfo from ._models_py3 import AddressValidationOutput from ._models_py3 import AddressValidationProperties + from ._models_py3 import ApiError from ._models_py3 import ApplianceNetworkConfiguration from ._models_py3 import ArmBaseObject from ._models_py3 import AvailableSkuRequest from ._models_py3 import AvailableSkusResult + from ._models_py3 import AzureFileFilterDetails + from ._models_py3 import BlobFilterDetails from ._models_py3 import CancellationReason from ._models_py3 import CloudError from ._models_py3 import ContactDetails @@ -22,6 +26,7 @@ from ._models_py3 import CreateJobValidations from ._models_py3 import CreateOrderLimitForSubscriptionValidationRequest from ._models_py3 import CreateOrderLimitForSubscriptionValidationResponseProperties + from ._models_py3 import DataAccountDetails from ._models_py3 import DataBoxAccountCopyLogDetails from ._models_py3 import DataBoxDiskCopyLogDetails from ._models_py3 import DataBoxDiskCopyProgress @@ -34,26 +39,32 @@ from ._models_py3 import DataBoxJobDetails from ._models_py3 import DataBoxScheduleAvailabilityRequest from ._models_py3 import DataBoxSecret - from ._models_py3 import DataDestinationDetailsValidationRequest - from ._models_py3 import DataDestinationDetailsValidationResponseProperties + from ._models_py3 import DataExportDetails + from ._models_py3 import DataImportDetails + from ._models_py3 import DataLocationToServiceLocationMap + from ._models_py3 import DataTransferDetailsValidationRequest + from ._models_py3 import DataTransferDetailsValidationResponseProperties from ._models_py3 import DataboxJobSecrets from ._models_py3 import DcAccessSecurityCode - from ._models_py3 import DestinationAccountDetails - from ._models_py3 import DestinationManagedDiskDetails - from ._models_py3 import DestinationStorageAccountDetails - from ._models_py3 import DestinationToServiceLocationMap + from ._models_py3 import Details from ._models_py3 import DiskScheduleAvailabilityRequest from ._models_py3 import DiskSecret - from ._models_py3 import Error + from ._models_py3 import EncryptionPreferences + from ._models_py3 import ErrorDetail + from ._models_py3 import FilterFileDetails from ._models_py3 import HeavyScheduleAvailabilityRequest + from ._models_py3 import IdentityProperties from ._models_py3 import JobDeliveryInfo from ._models_py3 import JobDetails - from ._models_py3 import JobErrorDetails from ._models_py3 import JobResource from ._models_py3 import JobResourceList from ._models_py3 import JobResourceUpdateParameter from ._models_py3 import JobSecrets from ._models_py3 import JobStages + from ._models_py3 import KeyEncryptionKey + from ._models_py3 import LastMitigationActionOnJob + from ._models_py3 import ManagedDiskDetails + from ._models_py3 import 
MitigateJobRequest from ._models_py3 import NotificationPreference from ._models_py3 import Operation from ._models_py3 import OperationDisplay @@ -65,6 +76,7 @@ from ._models_py3 import RegionConfigurationRequest from ._models_py3 import RegionConfigurationResponse from ._models_py3 import Resource + from ._models_py3 import ResourceIdentity from ._models_py3 import ScheduleAvailabilityRequest from ._models_py3 import ScheduleAvailabilityResponse from ._models_py3 import ShareCredentialDetails @@ -77,8 +89,15 @@ from ._models_py3 import SkuCapacity from ._models_py3 import SkuCost from ._models_py3 import SkuInformation + from ._models_py3 import StorageAccountDetails from ._models_py3 import SubscriptionIsAllowedToCreateJobValidationRequest from ._models_py3 import SubscriptionIsAllowedToCreateJobValidationResponseProperties + from ._models_py3 import SystemData + from ._models_py3 import TransferAllDetails + from ._models_py3 import TransferConfiguration + from ._models_py3 import TransferConfigurationTransferAllDetails + from ._models_py3 import TransferConfigurationTransferFilterDetails + from ._models_py3 import TransferFilterDetails from ._models_py3 import TransportAvailabilityDetails from ._models_py3 import TransportAvailabilityRequest from ._models_py3 import TransportAvailabilityResponse @@ -86,6 +105,8 @@ from ._models_py3 import UnencryptedCredentials from ._models_py3 import UnencryptedCredentialsList from ._models_py3 import UpdateJobDetails + from ._models_py3 import UserAssignedIdentity + from ._models_py3 import UserAssignedProperties from ._models_py3 import ValidateAddress from ._models_py3 import ValidationInputRequest from ._models_py3 import ValidationInputResponse @@ -93,12 +114,16 @@ from ._models_py3 import ValidationResponse except (SyntaxError, ImportError): from ._models import AccountCredentialDetails # type: ignore + from ._models import AdditionalErrorInfo # type: ignore from ._models import AddressValidationOutput # type: ignore from ._models import AddressValidationProperties # type: ignore + from ._models import ApiError # type: ignore from ._models import ApplianceNetworkConfiguration # type: ignore from ._models import ArmBaseObject # type: ignore from ._models import AvailableSkuRequest # type: ignore from ._models import AvailableSkusResult # type: ignore + from ._models import AzureFileFilterDetails # type: ignore + from ._models import BlobFilterDetails # type: ignore from ._models import CancellationReason # type: ignore from ._models import CloudError # type: ignore from ._models import ContactDetails # type: ignore @@ -107,6 +132,7 @@ from ._models import CreateJobValidations # type: ignore from ._models import CreateOrderLimitForSubscriptionValidationRequest # type: ignore from ._models import CreateOrderLimitForSubscriptionValidationResponseProperties # type: ignore + from ._models import DataAccountDetails # type: ignore from ._models import DataBoxAccountCopyLogDetails # type: ignore from ._models import DataBoxDiskCopyLogDetails # type: ignore from ._models import DataBoxDiskCopyProgress # type: ignore @@ -119,26 +145,32 @@ from ._models import DataBoxJobDetails # type: ignore from ._models import DataBoxScheduleAvailabilityRequest # type: ignore from ._models import DataBoxSecret # type: ignore - from ._models import DataDestinationDetailsValidationRequest # type: ignore - from ._models import DataDestinationDetailsValidationResponseProperties # type: ignore + from ._models import DataExportDetails # type: ignore + from ._models import 
DataImportDetails # type: ignore + from ._models import DataLocationToServiceLocationMap # type: ignore + from ._models import DataTransferDetailsValidationRequest # type: ignore + from ._models import DataTransferDetailsValidationResponseProperties # type: ignore from ._models import DataboxJobSecrets # type: ignore from ._models import DcAccessSecurityCode # type: ignore - from ._models import DestinationAccountDetails # type: ignore - from ._models import DestinationManagedDiskDetails # type: ignore - from ._models import DestinationStorageAccountDetails # type: ignore - from ._models import DestinationToServiceLocationMap # type: ignore + from ._models import Details # type: ignore from ._models import DiskScheduleAvailabilityRequest # type: ignore from ._models import DiskSecret # type: ignore - from ._models import Error # type: ignore + from ._models import EncryptionPreferences # type: ignore + from ._models import ErrorDetail # type: ignore + from ._models import FilterFileDetails # type: ignore from ._models import HeavyScheduleAvailabilityRequest # type: ignore + from ._models import IdentityProperties # type: ignore from ._models import JobDeliveryInfo # type: ignore from ._models import JobDetails # type: ignore - from ._models import JobErrorDetails # type: ignore from ._models import JobResource # type: ignore from ._models import JobResourceList # type: ignore from ._models import JobResourceUpdateParameter # type: ignore from ._models import JobSecrets # type: ignore from ._models import JobStages # type: ignore + from ._models import KeyEncryptionKey # type: ignore + from ._models import LastMitigationActionOnJob # type: ignore + from ._models import ManagedDiskDetails # type: ignore + from ._models import MitigateJobRequest # type: ignore from ._models import NotificationPreference # type: ignore from ._models import Operation # type: ignore from ._models import OperationDisplay # type: ignore @@ -150,6 +182,7 @@ from ._models import RegionConfigurationRequest # type: ignore from ._models import RegionConfigurationResponse # type: ignore from ._models import Resource # type: ignore + from ._models import ResourceIdentity # type: ignore from ._models import ScheduleAvailabilityRequest # type: ignore from ._models import ScheduleAvailabilityResponse # type: ignore from ._models import ShareCredentialDetails # type: ignore @@ -162,8 +195,15 @@ from ._models import SkuCapacity # type: ignore from ._models import SkuCost # type: ignore from ._models import SkuInformation # type: ignore + from ._models import StorageAccountDetails # type: ignore from ._models import SubscriptionIsAllowedToCreateJobValidationRequest # type: ignore from ._models import SubscriptionIsAllowedToCreateJobValidationResponseProperties # type: ignore + from ._models import SystemData # type: ignore + from ._models import TransferAllDetails # type: ignore + from ._models import TransferConfiguration # type: ignore + from ._models import TransferConfigurationTransferAllDetails # type: ignore + from ._models import TransferConfigurationTransferFilterDetails # type: ignore + from ._models import TransferFilterDetails # type: ignore from ._models import TransportAvailabilityDetails # type: ignore from ._models import TransportAvailabilityRequest # type: ignore from ._models import TransportAvailabilityResponse # type: ignore @@ -171,6 +211,8 @@ from ._models import UnencryptedCredentials # type: ignore from ._models import UnencryptedCredentialsList # type: ignore from ._models import UpdateJobDetails # 
type: ignore + from ._models import UserAssignedIdentity # type: ignore + from ._models import UserAssignedProperties # type: ignore from ._models import ValidateAddress # type: ignore from ._models import ValidationInputRequest # type: ignore from ._models import ValidationInputResponse # type: ignore @@ -183,8 +225,13 @@ AddressValidationStatus, ClassDiscriminator, CopyStatus, - DataDestinationType, + CustomerResolutionCode, + DataAccountType, + DoubleEncryption, + FilterFileType, JobDeliveryType, + KekType, + LogCollectionLevel, NotificationStageName, OverallValidationStatus, ShareDestinationFormatType, @@ -192,6 +239,8 @@ SkuName, StageName, StageStatus, + TransferConfigurationType, + TransferType, TransportShipmentTypes, ValidationInputDiscriminator, ValidationStatus, @@ -199,12 +248,16 @@ __all__ = [ 'AccountCredentialDetails', + 'AdditionalErrorInfo', 'AddressValidationOutput', 'AddressValidationProperties', + 'ApiError', 'ApplianceNetworkConfiguration', 'ArmBaseObject', 'AvailableSkuRequest', 'AvailableSkusResult', + 'AzureFileFilterDetails', + 'BlobFilterDetails', 'CancellationReason', 'CloudError', 'ContactDetails', @@ -213,6 +266,7 @@ 'CreateJobValidations', 'CreateOrderLimitForSubscriptionValidationRequest', 'CreateOrderLimitForSubscriptionValidationResponseProperties', + 'DataAccountDetails', 'DataBoxAccountCopyLogDetails', 'DataBoxDiskCopyLogDetails', 'DataBoxDiskCopyProgress', @@ -225,26 +279,32 @@ 'DataBoxJobDetails', 'DataBoxScheduleAvailabilityRequest', 'DataBoxSecret', - 'DataDestinationDetailsValidationRequest', - 'DataDestinationDetailsValidationResponseProperties', + 'DataExportDetails', + 'DataImportDetails', + 'DataLocationToServiceLocationMap', + 'DataTransferDetailsValidationRequest', + 'DataTransferDetailsValidationResponseProperties', 'DataboxJobSecrets', 'DcAccessSecurityCode', - 'DestinationAccountDetails', - 'DestinationManagedDiskDetails', - 'DestinationStorageAccountDetails', - 'DestinationToServiceLocationMap', + 'Details', 'DiskScheduleAvailabilityRequest', 'DiskSecret', - 'Error', + 'EncryptionPreferences', + 'ErrorDetail', + 'FilterFileDetails', 'HeavyScheduleAvailabilityRequest', + 'IdentityProperties', 'JobDeliveryInfo', 'JobDetails', - 'JobErrorDetails', 'JobResource', 'JobResourceList', 'JobResourceUpdateParameter', 'JobSecrets', 'JobStages', + 'KeyEncryptionKey', + 'LastMitigationActionOnJob', + 'ManagedDiskDetails', + 'MitigateJobRequest', 'NotificationPreference', 'Operation', 'OperationDisplay', @@ -256,6 +316,7 @@ 'RegionConfigurationRequest', 'RegionConfigurationResponse', 'Resource', + 'ResourceIdentity', 'ScheduleAvailabilityRequest', 'ScheduleAvailabilityResponse', 'ShareCredentialDetails', @@ -268,8 +329,15 @@ 'SkuCapacity', 'SkuCost', 'SkuInformation', + 'StorageAccountDetails', 'SubscriptionIsAllowedToCreateJobValidationRequest', 'SubscriptionIsAllowedToCreateJobValidationResponseProperties', + 'SystemData', + 'TransferAllDetails', + 'TransferConfiguration', + 'TransferConfigurationTransferAllDetails', + 'TransferConfigurationTransferFilterDetails', + 'TransferFilterDetails', 'TransportAvailabilityDetails', 'TransportAvailabilityRequest', 'TransportAvailabilityResponse', @@ -277,6 +345,8 @@ 'UnencryptedCredentials', 'UnencryptedCredentialsList', 'UpdateJobDetails', + 'UserAssignedIdentity', + 'UserAssignedProperties', 'ValidateAddress', 'ValidationInputRequest', 'ValidationInputResponse', @@ -287,8 +357,13 @@ 'AddressValidationStatus', 'ClassDiscriminator', 'CopyStatus', - 'DataDestinationType', + 'CustomerResolutionCode', + 
'DataAccountType', + 'DoubleEncryption', + 'FilterFileType', 'JobDeliveryType', + 'KekType', + 'LogCollectionLevel', 'NotificationStageName', 'OverallValidationStatus', 'ShareDestinationFormatType', @@ -296,6 +371,8 @@ 'SkuName', 'StageName', 'StageStatus', + 'TransferConfigurationType', + 'TransferType', 'TransportShipmentTypes', 'ValidationInputDiscriminator', 'ValidationStatus', diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/models/_data_box_management_client_enums.py b/src/databox/azext_databox/vendored_sdks/databox/models/_data_box_management_client_enums.py similarity index 75% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/models/_data_box_management_client_enums.py rename to src/databox/azext_databox/vendored_sdks/databox/models/_data_box_management_client_enums.py index e3da769b08c..a64f3615a69 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/models/_data_box_management_client_enums.py +++ b/src/databox/azext_databox/vendored_sdks/databox/models/_data_box_management_client_enums.py @@ -51,9 +51,9 @@ class ClassDiscriminator(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Indicates the type of job details. """ - DATA_BOX = "DataBox" #: Databox orders. - DATA_BOX_DISK = "DataBoxDisk" #: DataboxDisk orders. - DATA_BOX_HEAVY = "DataBoxHeavy" #: DataboxHeavy orders. + DATA_BOX = "DataBox" #: Data Box orders. + DATA_BOX_DISK = "DataBoxDisk" #: Data Box Disk orders. + DATA_BOX_HEAVY = "DataBoxHeavy" #: Data Box Heavy orders. class CopyStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The Status of the copy @@ -71,13 +71,33 @@ class CopyStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): STORAGE_ACCOUNT_NOT_ACCESSIBLE = "StorageAccountNotAccessible" #: Data copy failed. Storage Account was not accessible during copy. UNSUPPORTED_DATA = "UnsupportedData" #: Data copy failed. The Device data content is not supported. -class DataDestinationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """Data Destination Type. +class CustomerResolutionCode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + NONE = "None" #: No Resolution Yet. + MOVE_TO_CLEAN_UP_DEVICE = "MoveToCleanUpDevice" #: Clean the device. + RESUME = "Resume" #: Resume the job to same stage. + +class DataAccountType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Type of the account. """ STORAGE_ACCOUNT = "StorageAccount" #: Storage Accounts . MANAGED_DISK = "ManagedDisk" #: Azure Managed disk storage. +class DoubleEncryption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Defines secondary layer of software-based encryption enablement. + """ + + ENABLED = "Enabled" #: Software-based encryption is enabled. + DISABLED = "Disabled" #: Software-based encryption is disabled. + +class FilterFileType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Type of the filter file. + """ + + AZURE_BLOB = "AzureBlob" #: Filter file is of the type AzureBlob. + AZURE_FILE = "AzureFile" #: Filter file is of the type AzureFiles. + class JobDeliveryType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Delivery type of Job. """ @@ -85,6 +105,20 @@ class JobDeliveryType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): NON_SCHEDULED = "NonScheduled" #: Non Scheduled job. SCHEDULED = "Scheduled" #: Scheduled job. +class KekType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Type of encryption key used for key encryption. 
+ """ + + MICROSOFT_MANAGED = "MicrosoftManaged" #: Key encryption key is managed by Microsoft. + CUSTOMER_MANAGED = "CustomerManaged" #: Key encryption key is managed by the Customer. + +class LogCollectionLevel(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Level of the logs to be collected. + """ + + ERROR = "Error" #: Only Errors will be collected in the logs. + VERBOSE = "Verbose" #: Verbose logging (includes Errors, CRC, size information and others). + class NotificationStageName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Name of the stage. """ @@ -93,7 +127,7 @@ class NotificationStageName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)) DISPATCHED = "Dispatched" #: Notification at device dispatched stage. DELIVERED = "Delivered" #: Notification at device delivered stage. PICKED_UP = "PickedUp" #: Notification at device picked up from user stage. - AT_AZURE_DC = "AtAzureDC" #: Notification at device received at azure datacenter stage. + AT_AZURE_DC = "AtAzureDC" #: Notification at device received at Azure datacenter stage. DATA_COPY = "DataCopy" #: Notification at data copy started stage. class OverallValidationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): @@ -128,9 +162,9 @@ class SkuDisabledReason(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): class SkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - DATA_BOX = "DataBox" #: Databox. - DATA_BOX_DISK = "DataBoxDisk" #: DataboxDisk. - DATA_BOX_HEAVY = "DataBoxHeavy" #: DataboxHeavy. + DATA_BOX = "DataBox" #: Data Box. + DATA_BOX_DISK = "DataBoxDisk" #: Data Box Disk. + DATA_BOX_HEAVY = "DataBoxHeavy" #: Data Box Heavy. class StageName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Name of the stage which is in progress. @@ -140,14 +174,14 @@ class StageName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): DEVICE_PREPARED = "DevicePrepared" #: A device has been prepared for the order. DISPATCHED = "Dispatched" #: Device has been dispatched to the user of the order. DELIVERED = "Delivered" #: Device has been delivered to the user of the order. - PICKED_UP = "PickedUp" #: Device has been picked up from user and in transit to azure datacenter. - AT_AZURE_DC = "AtAzureDC" #: Device has been received at azure datacenter from the user. - DATA_COPY = "DataCopy" #: Data copy from the device at azure datacenter. + PICKED_UP = "PickedUp" #: Device has been picked up from user and in transit to Azure datacenter. + AT_AZURE_DC = "AtAzureDC" #: Device has been received at Azure datacenter from the user. + DATA_COPY = "DataCopy" #: Data copy from the device at Azure datacenter. COMPLETED = "Completed" #: Order has completed. COMPLETED_WITH_ERRORS = "CompletedWithErrors" #: Order has completed with errors. CANCELLED = "Cancelled" #: Order has been cancelled. FAILED_ISSUE_REPORTED_AT_CUSTOMER = "Failed_IssueReportedAtCustomer" #: Order has failed due to issue reported by user. - FAILED_ISSUE_DETECTED_AT_AZURE_DC = "Failed_IssueDetectedAtAzureDC" #: Order has failed due to issue detected at azure datacenter. + FAILED_ISSUE_DETECTED_AT_AZURE_DC = "Failed_IssueDetectedAtAzureDC" #: Order has failed due to issue detected at Azure datacenter. ABORTED = "Aborted" #: Order has been aborted. COMPLETED_WITH_WARNINGS = "CompletedWithWarnings" #: Order has completed with warnings. READY_TO_DISPATCH_FROM_AZURE_DC = "ReadyToDispatchFromAzureDC" #: Device is ready to be handed to customer from Azure DC. 
@@ -164,6 +198,22 @@ class StageStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): CANCELLED = "Cancelled" #: Stage has been cancelled. CANCELLING = "Cancelling" #: Stage is cancelling. SUCCEEDED_WITH_ERRORS = "SucceededWithErrors" #: Stage has succeeded with errors. + WAITING_FOR_CUSTOMER_ACTION = "WaitingForCustomerAction" #: Stage is stuck until customer takes some action. + SUCCEEDED_WITH_WARNINGS = "SucceededWithWarnings" #: Stage has succeeded with warnings. + +class TransferConfigurationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Type of the configuration for transfer. + """ + + TRANSFER_ALL = "TransferAll" #: Transfer all the data. + TRANSFER_USING_FILTER = "TransferUsingFilter" #: Transfer using filter. + +class TransferType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Type of the transfer. + """ + + IMPORT_TO_AZURE = "ImportToAzure" #: Import data to azure. + EXPORT_FROM_AZURE = "ExportFromAzure" #: Export data from azure. class TransportShipmentTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Transport Shipment Type supported for given region. @@ -177,11 +227,11 @@ class ValidationInputDiscriminator(with_metaclass(_CaseInsensitiveEnumMeta, str, """ VALIDATE_ADDRESS = "ValidateAddress" #: Identify request and response of address validation. - VALIDATE_DATA_DESTINATION_DETAILS = "ValidateDataDestinationDetails" #: Identify request and response of data destination details validation. VALIDATE_SUBSCRIPTION_IS_ALLOWED_TO_CREATE_JOB = "ValidateSubscriptionIsAllowedToCreateJob" #: Identify request and response for validation of subscription permission to create job. VALIDATE_PREFERENCES = "ValidatePreferences" #: Identify request and response of preference validation. VALIDATE_CREATE_ORDER_LIMIT = "ValidateCreateOrderLimit" #: Identify request and response of create order limit for subscription validation. VALIDATE_SKU_AVAILABILITY = "ValidateSkuAvailability" #: Identify request and response of active job limit for sku availability. + VALIDATE_DATA_TRANSFER_DETAILS = "ValidateDataTransferDetails" #: Identify request and response of data transfer details validation. class ValidationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Create order limit validation status. diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/models/_models.py b/src/databox/azext_databox/vendored_sdks/databox/models/_models.py similarity index 61% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/models/_models.py rename to src/databox/azext_databox/vendored_sdks/databox/models/_models.py index 0d62bb7ec20..2ce4d4dcf7b 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/models/_models.py +++ b/src/databox/azext_databox/vendored_sdks/databox/models/_models.py @@ -6,6 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from azure.core.exceptions import HttpResponseError import msrest.serialization @@ -16,26 +17,27 @@ class AccountCredentialDetails(msrest.serialization.Model): :ivar account_name: Name of the account. :vartype account_name: str - :ivar data_destination_type: Data Destination Type. Possible values include: "StorageAccount", + :ivar data_account_type: Type of the account. Possible values include: "StorageAccount", "ManagedDisk". 
- :vartype data_destination_type: str or ~azure.mgmt.databox.models.DataDestinationType + :vartype data_account_type: str or ~data_box_management_client.models.DataAccountType :ivar account_connection_string: Connection string of the account endpoint to use the account as a storage endpoint on the device. :vartype account_connection_string: str :ivar share_credential_details: Per share level unencrypted access credentials. - :vartype share_credential_details: list[~azure.mgmt.databox.models.ShareCredentialDetails] + :vartype share_credential_details: + list[~data_box_management_client.models.ShareCredentialDetails] """ _validation = { 'account_name': {'readonly': True}, - 'data_destination_type': {'readonly': True}, + 'data_account_type': {'readonly': True}, 'account_connection_string': {'readonly': True}, 'share_credential_details': {'readonly': True}, } _attribute_map = { 'account_name': {'key': 'accountName', 'type': 'str'}, - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, 'account_connection_string': {'key': 'accountConnectionString', 'type': 'str'}, 'share_credential_details': {'key': 'shareCredentialDetails', 'type': '[ShareCredentialDetails]'}, } @@ -46,28 +48,51 @@ def __init__( ): super(AccountCredentialDetails, self).__init__(**kwargs) self.account_name = None - self.data_destination_type = None + self.data_account_type = None self.account_connection_string = None self.share_credential_details = None +class AdditionalErrorInfo(msrest.serialization.Model): + """Additional error info. + + :param type: Additional error type. + :type type: str + :param info: Additional error info. + :type info: object + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'info': {'key': 'info', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AdditionalErrorInfo, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + self.info = kwargs.get('info', None) + + class AddressValidationOutput(msrest.serialization.Model): """Output of the address validation api. Variables are only populated by the server, and will be ignored when sending a request. :param validation_type: Identifies the type of validation response.Constant filled by server. - Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", + "ValidatePreferences", "ValidateCreateOrderLimit", "ValidateSkuAvailability", + "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar validation_status: The address validation status. Possible values include: "Valid", "Invalid", "Ambiguous". - :vartype validation_status: str or ~azure.mgmt.databox.models.AddressValidationStatus + :vartype validation_status: str or ~data_box_management_client.models.AddressValidationStatus :ivar alternate_addresses: List of alternate addresses. 
- :vartype alternate_addresses: list[~azure.mgmt.databox.models.ShippingAddress] + :vartype alternate_addresses: list[~data_box_management_client.models.ShippingAddress] """ _validation = { @@ -78,7 +103,7 @@ class AddressValidationOutput(msrest.serialization.Model): _attribute_map = { 'validation_type': {'key': 'properties.validationType', 'type': 'str'}, - 'error': {'key': 'properties.error', 'type': 'Error'}, + 'error': {'key': 'properties.error', 'type': 'CloudError'}, 'validation_status': {'key': 'properties.validationStatus', 'type': 'str'}, 'alternate_addresses': {'key': 'properties.alternateAddresses', 'type': '[ShippingAddress]'}, } @@ -98,19 +123,19 @@ class ValidationInputResponse(msrest.serialization.Model): """Minimum properties that should be present in each individual validation response. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AddressValidationProperties, CreateOrderLimitForSubscriptionValidationResponseProperties, DataDestinationDetailsValidationResponseProperties, PreferencesValidationResponseProperties, SkuAvailabilityValidationResponseProperties, SubscriptionIsAllowedToCreateJobValidationResponseProperties. + sub-classes are: AddressValidationProperties, CreateOrderLimitForSubscriptionValidationResponseProperties, DataTransferDetailsValidationResponseProperties, PreferencesValidationResponseProperties, SkuAvailabilityValidationResponseProperties, SubscriptionIsAllowedToCreateJobValidationResponseProperties. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. 
- :vartype error: ~azure.mgmt.databox.models.Error + :vartype error: ~data_box_management_client.models.CloudError """ _validation = { @@ -120,11 +145,11 @@ class ValidationInputResponse(msrest.serialization.Model): _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, } _subtype_map = { - 'validation_type': {'ValidateAddress': 'AddressValidationProperties', 'ValidateCreateOrderLimit': 'CreateOrderLimitForSubscriptionValidationResponseProperties', 'ValidateDataDestinationDetails': 'DataDestinationDetailsValidationResponseProperties', 'ValidatePreferences': 'PreferencesValidationResponseProperties', 'ValidateSkuAvailability': 'SkuAvailabilityValidationResponseProperties', 'ValidateSubscriptionIsAllowedToCreateJob': 'SubscriptionIsAllowedToCreateJobValidationResponseProperties'} + 'validation_type': {'ValidateAddress': 'AddressValidationProperties', 'ValidateCreateOrderLimit': 'CreateOrderLimitForSubscriptionValidationResponseProperties', 'ValidateDataTransferDetails': 'DataTransferDetailsValidationResponseProperties', 'ValidatePreferences': 'PreferencesValidationResponseProperties', 'ValidateSkuAvailability': 'SkuAvailabilityValidationResponseProperties', 'ValidateSubscriptionIsAllowedToCreateJob': 'SubscriptionIsAllowedToCreateJobValidationResponseProperties'} } def __init__( @@ -144,17 +169,17 @@ class AddressValidationProperties(ValidationInputResponse): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar validation_status: The address validation status. Possible values include: "Valid", "Invalid", "Ambiguous". - :vartype validation_status: str or ~azure.mgmt.databox.models.AddressValidationStatus + :vartype validation_status: str or ~data_box_management_client.models.AddressValidationStatus :ivar alternate_addresses: List of alternate addresses. - :vartype alternate_addresses: list[~azure.mgmt.databox.models.ShippingAddress] + :vartype alternate_addresses: list[~data_box_management_client.models.ShippingAddress] """ _validation = { @@ -166,7 +191,7 @@ class AddressValidationProperties(ValidationInputResponse): _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'validation_status': {'key': 'validationStatus', 'type': 'str'}, 'alternate_addresses': {'key': 'alternateAddresses', 'type': '[ShippingAddress]'}, } @@ -181,6 +206,31 @@ def __init__( self.alternate_addresses = None +class ApiError(msrest.serialization.Model): + """ApiError. + + All required parameters must be populated in order to send to Azure. + + :param error: Required. 
+ :type error: ~data_box_management_client.models.ErrorDetail + """ + + _validation = { + 'error': {'required': True}, + } + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorDetail'}, + } + + def __init__( + self, + **kwargs + ): + super(ApiError, self).__init__(**kwargs) + self.error = kwargs['error'] + + class ApplianceNetworkConfiguration(msrest.serialization.Model): """The Network Adapter configuration of a DataBox. @@ -249,12 +299,11 @@ def __init__( class AvailableSkuRequest(msrest.serialization.Model): """The filters for showing the available skus. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar transfer_type: Required. Type of the transfer. Default value: "ImportToAzure". - :vartype transfer_type: str + :param transfer_type: Required. Type of the transfer. Possible values include: "ImportToAzure", + "ExportFromAzure". + :type transfer_type: str or ~data_box_management_client.models.TransferType :param country: Required. ISO country code. Country for hardware shipment. For codes check: https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements. :type country: str @@ -262,11 +311,11 @@ class AvailableSkuRequest(msrest.serialization.Model): https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. :type location: str :param sku_names: Sku Names to filter for available skus. - :type sku_names: list[str or ~azure.mgmt.databox.models.SkuName] + :type sku_names: list[str or ~data_box_management_client.models.SkuName] """ _validation = { - 'transfer_type': {'required': True, 'constant': True}, + 'transfer_type': {'required': True}, 'country': {'required': True}, 'location': {'required': True}, } @@ -278,13 +327,12 @@ class AvailableSkuRequest(msrest.serialization.Model): 'sku_names': {'key': 'skuNames', 'type': '[str]'}, } - transfer_type = "ImportToAzure" - def __init__( self, **kwargs ): super(AvailableSkuRequest, self).__init__(**kwargs) + self.transfer_type = kwargs['transfer_type'] self.country = kwargs['country'] self.location = kwargs['location'] self.sku_names = kwargs.get('sku_names', None) @@ -296,7 +344,7 @@ class AvailableSkusResult(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. :ivar value: List of available skus. - :vartype value: list[~azure.mgmt.databox.models.SkuInformation] + :vartype value: list[~data_box_management_client.models.SkuInformation] :param next_link: Link for the next set of skus. :type next_link: str """ @@ -319,6 +367,60 @@ def __init__( self.next_link = kwargs.get('next_link', None) +class AzureFileFilterDetails(msrest.serialization.Model): + """Filter details to transfer Azure files. + + :param file_prefix_list: Prefix list of the Azure files to be transferred. + :type file_prefix_list: list[str] + :param file_path_list: List of full path of the files to be transferred. + :type file_path_list: list[str] + :param file_share_list: List of file shares to be transferred. 
+ :type file_share_list: list[str] + """ + + _attribute_map = { + 'file_prefix_list': {'key': 'filePrefixList', 'type': '[str]'}, + 'file_path_list': {'key': 'filePathList', 'type': '[str]'}, + 'file_share_list': {'key': 'fileShareList', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureFileFilterDetails, self).__init__(**kwargs) + self.file_prefix_list = kwargs.get('file_prefix_list', None) + self.file_path_list = kwargs.get('file_path_list', None) + self.file_share_list = kwargs.get('file_share_list', None) + + +class BlobFilterDetails(msrest.serialization.Model): + """Filter details to transfer Azure Blobs. + + :param blob_prefix_list: Prefix list of the Azure blobs to be transferred. + :type blob_prefix_list: list[str] + :param blob_path_list: List of full path of the blobs to be transferred. + :type blob_path_list: list[str] + :param container_list: List of blob containers to be transferred. + :type container_list: list[str] + """ + + _attribute_map = { + 'blob_prefix_list': {'key': 'blobPrefixList', 'type': '[str]'}, + 'blob_path_list': {'key': 'blobPathList', 'type': '[str]'}, + 'container_list': {'key': 'containerList', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobFilterDetails, self).__init__(**kwargs) + self.blob_prefix_list = kwargs.get('blob_prefix_list', None) + self.blob_path_list = kwargs.get('blob_path_list', None) + self.container_list = kwargs.get('container_list', None) + + class CancellationReason(msrest.serialization.Model): """Reason for cancellation. @@ -345,23 +447,25 @@ def __init__( class CloudError(msrest.serialization.Model): - """The error information object. + """Cloud error. Variables are only populated by the server, and will be ignored when sending a request. - :ivar code: Error code string. - :vartype code: str - :ivar message: Descriptive error information. - :vartype message: str - :param target: Error target. + :param code: Cloud error code. + :type code: str + :param message: Cloud error message. + :type message: str + :param target: Cloud error target. :type target: str - :param details: More detailed error information. - :type details: list[~azure.mgmt.databox.models.CloudError] + :ivar details: Cloud error details. + :vartype details: list[~data_box_management_client.models.CloudError] + :ivar additional_info: Cloud error additional info. + :vartype additional_info: list[~data_box_management_client.models.AdditionalErrorInfo] """ _validation = { - 'code': {'readonly': True}, - 'message': {'readonly': True}, + 'details': {'readonly': True}, + 'additional_info': {'readonly': True}, } _attribute_map = { @@ -369,6 +473,7 @@ class CloudError(msrest.serialization.Model): 'message': {'key': 'message', 'type': 'str'}, 'target': {'key': 'target', 'type': 'str'}, 'details': {'key': 'details', 'type': '[CloudError]'}, + 'additional_info': {'key': 'additionalInfo', 'type': '[AdditionalErrorInfo]'}, } def __init__( @@ -376,10 +481,11 @@ def __init__( **kwargs ): super(CloudError, self).__init__(**kwargs) - self.code = None - self.message = None + self.code = kwargs.get('code', None) + self.message = kwargs.get('message', None) self.target = kwargs.get('target', None) - self.details = kwargs.get('details', None) + self.details = None + self.additional_info = None class ContactDetails(msrest.serialization.Model): @@ -398,7 +504,7 @@ class ContactDetails(msrest.serialization.Model): :param email_list: Required. List of Email-ids to be notified about job progress. 
:type email_list: list[str] :param notification_preference: Notification preference for a job stage. - :type notification_preference: list[~azure.mgmt.databox.models.NotificationPreference] + :type notification_preference: list[~data_box_management_client.models.NotificationPreference] """ _validation = { @@ -439,7 +545,7 @@ class CopyLogDetails(msrest.serialization.Model): :param copy_log_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type copy_log_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator + :type copy_log_details_type: str or ~data_box_management_client.models.ClassDiscriminator """ _validation = { @@ -467,20 +573,24 @@ class CopyProgress(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :ivar storage_account_name: Name of the storage account where the data needs to be uploaded. + :ivar storage_account_name: Name of the storage account. This will be empty for data account + types other than storage account. :vartype storage_account_name: str - :ivar data_destination_type: Data Destination Type. Possible values include: "StorageAccount", + :ivar transfer_type: Transfer type of data. Possible values include: "ImportToAzure", + "ExportFromAzure". + :vartype transfer_type: str or ~data_box_management_client.models.TransferType + :ivar data_account_type: Data Account Type. Possible values include: "StorageAccount", "ManagedDisk". - :vartype data_destination_type: str or ~azure.mgmt.databox.models.DataDestinationType + :vartype data_account_type: str or ~data_box_management_client.models.DataAccountType :ivar account_id: Id of the account where the data needs to be uploaded. :vartype account_id: str - :ivar bytes_sent_to_cloud: Amount of data uploaded by the job as of now. - :vartype bytes_sent_to_cloud: long + :ivar bytes_processed: To indicate bytes transferred. + :vartype bytes_processed: long :ivar total_bytes_to_process: Total amount of data to be processed by the job. :vartype total_bytes_to_process: long - :ivar files_processed: Number of files processed by the job as of now. + :ivar files_processed: Number of files processed. :vartype files_processed: long - :ivar total_files_to_process: Total number of files to be processed by the job. + :ivar total_files_to_process: Total files to process. :vartype total_files_to_process: long :ivar invalid_files_processed: Number of files not adhering to azure naming conventions which were processed by automatic renaming. @@ -493,13 +603,21 @@ class CopyProgress(msrest.serialization.Model): :vartype renamed_container_count: long :ivar files_errored_out: Number of files which could not be copied. :vartype files_errored_out: long + :ivar directories_errored_out: To indicate directories errored out in the job. + :vartype directories_errored_out: long + :ivar invalid_directories_processed: To indicate directories renamed. + :vartype invalid_directories_processed: long + :ivar is_enumeration_in_progress: To indicate if enumeration of data is in progress. + Until this is true, the TotalBytesToProcess may not be valid. 
+ :vartype is_enumeration_in_progress: bool """ _validation = { 'storage_account_name': {'readonly': True}, - 'data_destination_type': {'readonly': True}, + 'transfer_type': {'readonly': True}, + 'data_account_type': {'readonly': True}, 'account_id': {'readonly': True}, - 'bytes_sent_to_cloud': {'readonly': True}, + 'bytes_processed': {'readonly': True}, 'total_bytes_to_process': {'readonly': True}, 'files_processed': {'readonly': True}, 'total_files_to_process': {'readonly': True}, @@ -507,13 +625,17 @@ class CopyProgress(msrest.serialization.Model): 'invalid_file_bytes_uploaded': {'readonly': True}, 'renamed_container_count': {'readonly': True}, 'files_errored_out': {'readonly': True}, + 'directories_errored_out': {'readonly': True}, + 'invalid_directories_processed': {'readonly': True}, + 'is_enumeration_in_progress': {'readonly': True}, } _attribute_map = { 'storage_account_name': {'key': 'storageAccountName', 'type': 'str'}, - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, + 'transfer_type': {'key': 'transferType', 'type': 'str'}, + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, 'account_id': {'key': 'accountId', 'type': 'str'}, - 'bytes_sent_to_cloud': {'key': 'bytesSentToCloud', 'type': 'long'}, + 'bytes_processed': {'key': 'bytesProcessed', 'type': 'long'}, 'total_bytes_to_process': {'key': 'totalBytesToProcess', 'type': 'long'}, 'files_processed': {'key': 'filesProcessed', 'type': 'long'}, 'total_files_to_process': {'key': 'totalFilesToProcess', 'type': 'long'}, @@ -521,6 +643,9 @@ class CopyProgress(msrest.serialization.Model): 'invalid_file_bytes_uploaded': {'key': 'invalidFileBytesUploaded', 'type': 'long'}, 'renamed_container_count': {'key': 'renamedContainerCount', 'type': 'long'}, 'files_errored_out': {'key': 'filesErroredOut', 'type': 'long'}, + 'directories_errored_out': {'key': 'directoriesErroredOut', 'type': 'long'}, + 'invalid_directories_processed': {'key': 'invalidDirectoriesProcessed', 'type': 'long'}, + 'is_enumeration_in_progress': {'key': 'isEnumerationInProgress', 'type': 'bool'}, } def __init__( @@ -529,9 +654,10 @@ def __init__( ): super(CopyProgress, self).__init__(**kwargs) self.storage_account_name = None - self.data_destination_type = None + self.transfer_type = None + self.data_account_type = None self.account_id = None - self.bytes_sent_to_cloud = None + self.bytes_processed = None self.total_bytes_to_process = None self.files_processed = None self.total_files_to_process = None @@ -539,32 +665,36 @@ def __init__( self.invalid_file_bytes_uploaded = None self.renamed_container_count = None self.files_errored_out = None + self.directories_errored_out = None + self.invalid_directories_processed = None + self.is_enumeration_in_progress = None class ValidationRequest(msrest.serialization.Model): - """Input request for all pre job creation validation. + """Minimum request requirement of any validation category. You probably want to use the sub-classes and not this class directly. Known sub-classes are: CreateJobValidations. All required parameters must be populated in order to send to Azure. - :param individual_request_details: Required. List of request details contain validationType and - its request as key and value respectively. - :type individual_request_details: list[~azure.mgmt.databox.models.ValidationInputRequest] :param validation_category: Required. Identify the nature of validation.Constant filled by server. :type validation_category: str + :param individual_request_details: Required. 
List of request details contain validationType and + its request as key and value respectively. + :type individual_request_details: + list[~data_box_management_client.models.ValidationInputRequest] """ _validation = { - 'individual_request_details': {'required': True}, 'validation_category': {'required': True}, + 'individual_request_details': {'required': True}, } _attribute_map = { - 'individual_request_details': {'key': 'individualRequestDetails', 'type': '[ValidationInputRequest]'}, 'validation_category': {'key': 'validationCategory', 'type': 'str'}, + 'individual_request_details': {'key': 'individualRequestDetails', 'type': '[ValidationInputRequest]'}, } _subtype_map = { @@ -576,8 +706,8 @@ def __init__( **kwargs ): super(ValidationRequest, self).__init__(**kwargs) - self.individual_request_details = kwargs['individual_request_details'] self.validation_category = None # type: Optional[str] + self.individual_request_details = kwargs['individual_request_details'] class CreateJobValidations(ValidationRequest): @@ -585,22 +715,23 @@ class CreateJobValidations(ValidationRequest): All required parameters must be populated in order to send to Azure. - :param individual_request_details: Required. List of request details contain validationType and - its request as key and value respectively. - :type individual_request_details: list[~azure.mgmt.databox.models.ValidationInputRequest] :param validation_category: Required. Identify the nature of validation.Constant filled by server. :type validation_category: str + :param individual_request_details: Required. List of request details contain validationType and + its request as key and value respectively. + :type individual_request_details: + list[~data_box_management_client.models.ValidationInputRequest] """ _validation = { - 'individual_request_details': {'required': True}, 'validation_category': {'required': True}, + 'individual_request_details': {'required': True}, } _attribute_map = { - 'individual_request_details': {'key': 'individualRequestDetails', 'type': '[ValidationInputRequest]'}, 'validation_category': {'key': 'validationCategory', 'type': 'str'}, + 'individual_request_details': {'key': 'individualRequestDetails', 'type': '[ValidationInputRequest]'}, } def __init__( @@ -615,15 +746,15 @@ class ValidationInputRequest(msrest.serialization.Model): """Minimum fields that must be present in any type of validation request. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ValidateAddress, CreateOrderLimitForSubscriptionValidationRequest, DataDestinationDetailsValidationRequest, PreferencesValidationRequest, SkuAvailabilityValidationRequest, SubscriptionIsAllowedToCreateJobValidationRequest. + sub-classes are: ValidateAddress, CreateOrderLimitForSubscriptionValidationRequest, DataTransferDetailsValidationRequest, PreferencesValidationRequest, SkuAvailabilityValidationRequest, SubscriptionIsAllowedToCreateJobValidationRequest. All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". 
+ :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator """ _validation = { @@ -635,7 +766,7 @@ class ValidationInputRequest(msrest.serialization.Model): } _subtype_map = { - 'validation_type': {'ValidateAddress': 'ValidateAddress', 'ValidateCreateOrderLimit': 'CreateOrderLimitForSubscriptionValidationRequest', 'ValidateDataDestinationDetails': 'DataDestinationDetailsValidationRequest', 'ValidatePreferences': 'PreferencesValidationRequest', 'ValidateSkuAvailability': 'SkuAvailabilityValidationRequest', 'ValidateSubscriptionIsAllowedToCreateJob': 'SubscriptionIsAllowedToCreateJobValidationRequest'} + 'validation_type': {'ValidateAddress': 'ValidateAddress', 'ValidateCreateOrderLimit': 'CreateOrderLimitForSubscriptionValidationRequest', 'ValidateDataTransferDetails': 'DataTransferDetailsValidationRequest', 'ValidatePreferences': 'PreferencesValidationRequest', 'ValidateSkuAvailability': 'SkuAvailabilityValidationRequest', 'ValidateSubscriptionIsAllowedToCreateJob': 'SubscriptionIsAllowedToCreateJobValidationRequest'} } def __init__( @@ -652,13 +783,13 @@ class CreateOrderLimitForSubscriptionValidationRequest(ValidationInputRequest): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :param device_type: Required. Device type to be used for the job. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type device_type: str or ~azure.mgmt.databox.models.SkuName + :type device_type: str or ~data_box_management_client.models.SkuName """ _validation = { @@ -688,15 +819,15 @@ class CreateOrderLimitForSubscriptionValidationResponseProperties(ValidationInpu All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar status: Create order limit validation status. Possible values include: "Valid", "Invalid", "Skipped". 
- :vartype status: str or ~azure.mgmt.databox.models.ValidationStatus + :vartype status: str or ~data_box_management_client.models.ValidationStatus """ _validation = { @@ -707,7 +838,7 @@ class CreateOrderLimitForSubscriptionValidationResponseProperties(ValidationInpu _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'status': {'key': 'status', 'type': 'str'}, } @@ -720,6 +851,48 @@ def __init__( self.status = None +class DataAccountDetails(msrest.serialization.Model): + """Account details of the data to be transferred. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ManagedDiskDetails, StorageAccountDetails. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Account Type of the data to be transferred.Constant filled + by server. Possible values include: "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param share_password: Password for all the shares to be created on the device. Should not be + passed for TransferType:ExportFromAzure jobs. If this is not passed, the service will generate + password itself. This will not be returned in Get Call. Password Requirements : Password must + be minimum of 12 and maximum of 64 characters. Password must have at least one uppercase + alphabet, one number and one special character. Password cannot have the following characters : + IilLoO0 Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. + :type share_password: str + """ + + _validation = { + 'data_account_type': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'share_password': {'key': 'sharePassword', 'type': 'str'}, + } + + _subtype_map = { + 'data_account_type': {'ManagedDisk': 'ManagedDiskDetails', 'StorageAccount': 'StorageAccountDetails'} + } + + def __init__( + self, + **kwargs + ): + super(DataAccountDetails, self).__init__(**kwargs) + self.data_account_type = None # type: Optional[str] + self.share_password = kwargs.get('share_password', None) + + class DataBoxAccountCopyLogDetails(CopyLogDetails): """Copy log details for a storage account of a DataBox job. @@ -729,23 +902,28 @@ class DataBoxAccountCopyLogDetails(CopyLogDetails): :param copy_log_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type copy_log_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :ivar account_name: Destination account name. + :type copy_log_details_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar account_name: Account name. :vartype account_name: str :ivar copy_log_link: Link for copy logs. :vartype copy_log_link: str + :ivar copy_verbose_log_link: Link for copy verbose logs. This will be set only when + LogCollectionLevel is set to Verbose. 
+ :vartype copy_verbose_log_link: str """ _validation = { 'copy_log_details_type': {'required': True}, 'account_name': {'readonly': True}, 'copy_log_link': {'readonly': True}, + 'copy_verbose_log_link': {'readonly': True}, } _attribute_map = { 'copy_log_details_type': {'key': 'copyLogDetailsType', 'type': 'str'}, 'account_name': {'key': 'accountName', 'type': 'str'}, 'copy_log_link': {'key': 'copyLogLink', 'type': 'str'}, + 'copy_verbose_log_link': {'key': 'copyVerboseLogLink', 'type': 'str'}, } def __init__( @@ -756,6 +934,7 @@ def __init__( self.copy_log_details_type = 'DataBox' # type: str self.account_name = None self.copy_log_link = None + self.copy_verbose_log_link = None class DataBoxDiskCopyLogDetails(CopyLogDetails): @@ -767,7 +946,7 @@ class DataBoxDiskCopyLogDetails(CopyLogDetails): :param copy_log_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type copy_log_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator + :type copy_log_details_type: str or ~data_box_management_client.models.ClassDiscriminator :ivar disk_serial_number: Disk Serial Number. :vartype disk_serial_number: str :ivar error_log_link: Link for copy error logs. @@ -815,7 +994,7 @@ class DataBoxDiskCopyProgress(msrest.serialization.Model): :ivar status: The Status of the copy. Possible values include: "NotStarted", "InProgress", "Completed", "CompletedWithErrors", "Failed", "NotReturned", "HardwareError", "DeviceFormatted", "DeviceMetadataModified", "StorageAccountNotAccessible", "UnsupportedData". - :vartype status: str or ~azure.mgmt.databox.models.CopyStatus + :vartype status: str or ~data_box_management_client.models.CopyStatus """ _validation = { @@ -853,64 +1032,73 @@ class JobDetails(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param expected_data_size_in_terabytes: The expected size of the data, which needs to be - transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int :ivar job_stages: List of stages that run in the job. - :vartype job_stages: list[~azure.mgmt.databox.models.JobStages] + :vartype job_stages: list[~data_box_management_client.models.JobStages] :param contact_details: Required. Contact details for notification and shipping. - :type contact_details: ~azure.mgmt.databox.models.ContactDetails - :param shipping_address: Required. Shipping address of the customer. - :type shipping_address: ~azure.mgmt.databox.models.ShippingAddress + :type contact_details: ~data_box_management_client.models.ContactDetails + :param shipping_address: Shipping address of the customer. + :type shipping_address: ~data_box_management_client.models.ShippingAddress :ivar delivery_package: Delivery package shipping details. - :vartype delivery_package: ~azure.mgmt.databox.models.PackageShippingDetails + :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails :ivar return_package: Return package shipping details. - :vartype return_package: ~azure.mgmt.databox.models.PackageShippingDetails - :param destination_account_details: Required. Destination account details. - :type destination_account_details: list[~azure.mgmt.databox.models.DestinationAccountDetails] - :ivar error_details: Error details for failure. This is optional. 
- :vartype error_details: list[~azure.mgmt.databox.models.JobErrorDetails] + :vartype return_package: ~data_box_management_client.models.PackageShippingDetails + :param data_import_details: Details of the data to be imported into azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] :param job_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator + :type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator :param preferences: Preferences for the order. - :type preferences: ~azure.mgmt.databox.models.Preferences + :type preferences: ~data_box_management_client.models.Preferences :ivar copy_log_details: List of copy log details. - :vartype copy_log_details: list[~azure.mgmt.databox.models.CopyLogDetails] + :vartype copy_log_details: list[~data_box_management_client.models.CopyLogDetails] :ivar reverse_shipment_label_sas_key: Shared access key to download the return shipment label. :vartype reverse_shipment_label_sas_key: str :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. :vartype chain_of_custody_sas_key: str + :param key_encryption_key: Details about which key encryption type is being used. + :type key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_tera_bytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. + :type expected_data_size_in_tera_bytes: int + :ivar actions: Available actions on the job. + :vartype actions: list[str or ~data_box_management_client.models.CustomerResolutionCode] + :ivar last_mitigation_action_on_job: Last mitigation action performed on the job. 
+ :vartype last_mitigation_action_on_job: + ~data_box_management_client.models.LastMitigationActionOnJob """ _validation = { 'job_stages': {'readonly': True}, 'contact_details': {'required': True}, - 'shipping_address': {'required': True}, 'delivery_package': {'readonly': True}, 'return_package': {'readonly': True}, - 'destination_account_details': {'required': True}, - 'error_details': {'readonly': True}, 'job_details_type': {'required': True}, 'copy_log_details': {'readonly': True}, 'reverse_shipment_label_sas_key': {'readonly': True}, 'chain_of_custody_sas_key': {'readonly': True}, + 'actions': {'readonly': True}, + 'last_mitigation_action_on_job': {'readonly': True}, } _attribute_map = { - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, 'preferences': {'key': 'preferences', 'type': 'Preferences'}, 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_tera_bytes': {'key': 'expectedDataSizeInTeraBytes', 'type': 'int'}, + 'actions': {'key': 'actions', 'type': '[str]'}, + 'last_mitigation_action_on_job': {'key': 'lastMitigationActionOnJob', 'type': 'LastMitigationActionOnJob'}, } _subtype_map = { @@ -922,19 +1110,22 @@ def __init__( **kwargs ): super(JobDetails, self).__init__(**kwargs) - self.expected_data_size_in_terabytes = kwargs.get('expected_data_size_in_terabytes', None) self.job_stages = None self.contact_details = kwargs['contact_details'] - self.shipping_address = kwargs['shipping_address'] + self.shipping_address = kwargs.get('shipping_address', None) self.delivery_package = None self.return_package = None - self.destination_account_details = kwargs['destination_account_details'] - self.error_details = None + self.data_import_details = kwargs.get('data_import_details', None) + self.data_export_details = kwargs.get('data_export_details', None) self.job_details_type = None # type: Optional[str] self.preferences = kwargs.get('preferences', None) self.copy_log_details = None self.reverse_shipment_label_sas_key = None self.chain_of_custody_sas_key = None + self.key_encryption_key = kwargs.get('key_encryption_key', None) + self.expected_data_size_in_tera_bytes = kwargs.get('expected_data_size_in_tera_bytes', None) + self.actions = None + self.last_mitigation_action_on_job = None class DataBoxDiskJobDetails(JobDetails): @@ -944,40 +1135,47 @@ class DataBoxDiskJobDetails(JobDetails): All required parameters must be populated in order to send to Azure. 
- :param expected_data_size_in_terabytes: The expected size of the data, which needs to be - transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int :ivar job_stages: List of stages that run in the job. - :vartype job_stages: list[~azure.mgmt.databox.models.JobStages] + :vartype job_stages: list[~data_box_management_client.models.JobStages] :param contact_details: Required. Contact details for notification and shipping. - :type contact_details: ~azure.mgmt.databox.models.ContactDetails - :param shipping_address: Required. Shipping address of the customer. - :type shipping_address: ~azure.mgmt.databox.models.ShippingAddress + :type contact_details: ~data_box_management_client.models.ContactDetails + :param shipping_address: Shipping address of the customer. + :type shipping_address: ~data_box_management_client.models.ShippingAddress :ivar delivery_package: Delivery package shipping details. - :vartype delivery_package: ~azure.mgmt.databox.models.PackageShippingDetails + :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails :ivar return_package: Return package shipping details. - :vartype return_package: ~azure.mgmt.databox.models.PackageShippingDetails - :param destination_account_details: Required. Destination account details. - :type destination_account_details: list[~azure.mgmt.databox.models.DestinationAccountDetails] - :ivar error_details: Error details for failure. This is optional. - :vartype error_details: list[~azure.mgmt.databox.models.JobErrorDetails] + :vartype return_package: ~data_box_management_client.models.PackageShippingDetails + :param data_import_details: Details of the data to be imported into azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] :param job_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator + :type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator :param preferences: Preferences for the order. - :type preferences: ~azure.mgmt.databox.models.Preferences + :type preferences: ~data_box_management_client.models.Preferences :ivar copy_log_details: List of copy log details. - :vartype copy_log_details: list[~azure.mgmt.databox.models.CopyLogDetails] + :vartype copy_log_details: list[~data_box_management_client.models.CopyLogDetails] :ivar reverse_shipment_label_sas_key: Shared access key to download the return shipment label. :vartype reverse_shipment_label_sas_key: str :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. :vartype chain_of_custody_sas_key: str + :param key_encryption_key: Details about which key encryption type is being used. + :type key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_tera_bytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. + :type expected_data_size_in_tera_bytes: int + :ivar actions: Available actions on the job. + :vartype actions: list[str or ~data_box_management_client.models.CustomerResolutionCode] + :ivar last_mitigation_action_on_job: Last mitigation action performed on the job. 
+ :vartype last_mitigation_action_on_job: + ~data_box_management_client.models.LastMitigationActionOnJob :param preferred_disks: User preference on what size disks are needed for the job. The map is from the disk size in TB to the count. Eg. {2,5} means 5 disks of 2 TB size. Key is string but will be checked against an int. :type preferred_disks: dict[str, int] :ivar copy_progress: Copy progress per disk. - :vartype copy_progress: list[~azure.mgmt.databox.models.DataBoxDiskCopyProgress] + :vartype copy_progress: list[~data_box_management_client.models.DataBoxDiskCopyProgress] :ivar disks_and_size_details: Contains the map of disk serial number to the disk size being used for the job. Is returned only after the disks are shipped to the customer. :vartype disks_and_size_details: dict[str, int] @@ -988,33 +1186,35 @@ class DataBoxDiskJobDetails(JobDetails): _validation = { 'job_stages': {'readonly': True}, 'contact_details': {'required': True}, - 'shipping_address': {'required': True}, 'delivery_package': {'readonly': True}, 'return_package': {'readonly': True}, - 'destination_account_details': {'required': True}, - 'error_details': {'readonly': True}, 'job_details_type': {'required': True}, 'copy_log_details': {'readonly': True}, 'reverse_shipment_label_sas_key': {'readonly': True}, 'chain_of_custody_sas_key': {'readonly': True}, + 'actions': {'readonly': True}, + 'last_mitigation_action_on_job': {'readonly': True}, 'copy_progress': {'readonly': True}, 'disks_and_size_details': {'readonly': True}, } _attribute_map = { - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, 'preferences': {'key': 'preferences', 'type': 'Preferences'}, 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_tera_bytes': {'key': 'expectedDataSizeInTeraBytes', 'type': 'int'}, + 'actions': {'key': 'actions', 'type': '[str]'}, + 'last_mitigation_action_on_job': {'key': 'lastMitigationActionOnJob', 'type': 'LastMitigationActionOnJob'}, 'preferred_disks': {'key': 'preferredDisks', 'type': '{int}'}, 'copy_progress': {'key': 'copyProgress', 'type': '[DataBoxDiskCopyProgress]'}, 'disks_and_size_details': {'key': 'disksAndSizeDetails', 'type': '{int}'}, @@ -1039,22 +1239,29 @@ class JobSecrets(msrest.serialization.Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: DataboxJobSecrets, DataBoxDiskJobSecrets, DataBoxHeavyJobSecrets. 
+ Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_secrets_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :param dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. - :type dc_access_security_code: ~azure.mgmt.databox.models.DcAccessSecurityCode + :type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. + :vartype error: ~data_box_management_client.models.CloudError """ _validation = { 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, } _attribute_map = { 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, } _subtype_map = { @@ -1067,7 +1274,8 @@ def __init__( ): super(JobSecrets, self).__init__(**kwargs) self.job_secrets_type = None # type: Optional[str] - self.dc_access_security_code = kwargs.get('dc_access_security_code', None) + self.dc_access_security_code = None + self.error = None class DataBoxDiskJobSecrets(JobSecrets): @@ -1079,11 +1287,13 @@ class DataBoxDiskJobSecrets(JobSecrets): :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_secrets_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :param dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. - :type dc_access_security_code: ~azure.mgmt.databox.models.DcAccessSecurityCode + :type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. + :vartype error: ~data_box_management_client.models.CloudError :ivar disk_secrets: Contains the list of secrets object for that device. - :vartype disk_secrets: list[~azure.mgmt.databox.models.DiskSecret] + :vartype disk_secrets: list[~data_box_management_client.models.DiskSecret] :ivar pass_key: PassKey for the disk Job. :vartype pass_key: str :ivar is_passkey_user_defined: Whether passkey was provided by user. 
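The hunks above and below change the client-side contract of these generated models: `shipping_address` and the removed `destination_account_details` are no longer required at creation time, destinations are expressed through `data_import_details`/`data_export_details` backed by the new `DataAccountDetails` subtypes, `expected_data_size_in_terabytes` is renamed to `expected_data_size_in_tera_bytes`, and secret-related fields such as `dc_access_security_code` and `error` become server-populated read-only attributes. Below is a minimal sketch of how a caller might assemble a disk job's details with these kwargs-based constructors; the module path and the `DataImportDetails`/`StorageAccountDetails` keyword arguments are assumptions inferred from the surrounding generated code, not shown in this diff.

```python
# Illustrative sketch only: the import path and the DataImportDetails /
# StorageAccountDetails keyword arguments are assumptions, not taken from this diff.
from azext_databox.vendored_sdks.databox import models

contact = models.ContactDetails(
    contact_name="Public SDK Test",
    phone="1234567890",
    email_list=["testing@microsoft.com"],
)

# Destinations are now described per data account instead of via the removed
# destination_account_details list (assumed DataImportDetails/StorageAccountDetails shape).
import_details = models.DataImportDetails(
    account_details=models.StorageAccountDetails(
        storage_account_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/"
                           "Microsoft.Storage/storageAccounts/<sa>",
    ),
)

details = models.DataBoxDiskJobDetails(
    contact_details=contact,                  # still required
    data_import_details=[import_details],     # replaces destination_account_details
    expected_data_size_in_tera_bytes=1,       # renamed from expected_data_size_in_terabytes
    preferred_disks={"2": 5},                 # five 2 TB disks, keyed by size as a string
)

# Read-only members (job_stages, copy_progress, and the secrets' dc_access_security_code
# and error) are populated by the service and are ignored if set by the caller.
```

The same renamed and read-only fields apply to `DataBoxJobDetails` and `DataBoxHeavyJobDetails` in the hunks that follow.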
@@ -1092,6 +1302,8 @@ class DataBoxDiskJobSecrets(JobSecrets): _validation = { 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, 'disk_secrets': {'readonly': True}, 'pass_key': {'readonly': True}, 'is_passkey_user_defined': {'readonly': True}, @@ -1100,6 +1312,7 @@ class DataBoxDiskJobSecrets(JobSecrets): _attribute_map = { 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'disk_secrets': {'key': 'diskSecrets', 'type': '[DiskSecret]'}, 'pass_key': {'key': 'passKey', 'type': 'str'}, 'is_passkey_user_defined': {'key': 'isPasskeyUserDefined', 'type': 'bool'}, @@ -1125,23 +1338,28 @@ class DataBoxHeavyAccountCopyLogDetails(CopyLogDetails): :param copy_log_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type copy_log_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :ivar account_name: Destination account name. + :type copy_log_details_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar account_name: Account name. :vartype account_name: str :ivar copy_log_link: Link for copy logs. :vartype copy_log_link: list[str] + :ivar copy_verbose_log_link: Link for copy verbose logs. This will be set only when the + LogCollectionLevel is set to verbose. + :vartype copy_verbose_log_link: list[str] """ _validation = { 'copy_log_details_type': {'required': True}, 'account_name': {'readonly': True}, 'copy_log_link': {'readonly': True}, + 'copy_verbose_log_link': {'readonly': True}, } _attribute_map = { 'copy_log_details_type': {'key': 'copyLogDetailsType', 'type': 'str'}, 'account_name': {'key': 'accountName', 'type': 'str'}, 'copy_log_link': {'key': 'copyLogLink', 'type': '[str]'}, + 'copy_verbose_log_link': {'key': 'copyVerboseLogLink', 'type': '[str]'}, } def __init__( @@ -1152,6 +1370,7 @@ def __init__( self.copy_log_details_type = 'DataBoxHeavy' # type: str self.account_name = None self.copy_log_link = None + self.copy_verbose_log_link = None class DataBoxHeavyJobDetails(JobDetails): @@ -1161,69 +1380,83 @@ class DataBoxHeavyJobDetails(JobDetails): All required parameters must be populated in order to send to Azure. - :param expected_data_size_in_terabytes: The expected size of the data, which needs to be - transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int :ivar job_stages: List of stages that run in the job. - :vartype job_stages: list[~azure.mgmt.databox.models.JobStages] + :vartype job_stages: list[~data_box_management_client.models.JobStages] :param contact_details: Required. Contact details for notification and shipping. - :type contact_details: ~azure.mgmt.databox.models.ContactDetails - :param shipping_address: Required. Shipping address of the customer. - :type shipping_address: ~azure.mgmt.databox.models.ShippingAddress + :type contact_details: ~data_box_management_client.models.ContactDetails + :param shipping_address: Shipping address of the customer. + :type shipping_address: ~data_box_management_client.models.ShippingAddress :ivar delivery_package: Delivery package shipping details. 
- :vartype delivery_package: ~azure.mgmt.databox.models.PackageShippingDetails + :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails :ivar return_package: Return package shipping details. - :vartype return_package: ~azure.mgmt.databox.models.PackageShippingDetails - :param destination_account_details: Required. Destination account details. - :type destination_account_details: list[~azure.mgmt.databox.models.DestinationAccountDetails] - :ivar error_details: Error details for failure. This is optional. - :vartype error_details: list[~azure.mgmt.databox.models.JobErrorDetails] + :vartype return_package: ~data_box_management_client.models.PackageShippingDetails + :param data_import_details: Details of the data to be imported into azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] :param job_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator + :type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator :param preferences: Preferences for the order. - :type preferences: ~azure.mgmt.databox.models.Preferences + :type preferences: ~data_box_management_client.models.Preferences :ivar copy_log_details: List of copy log details. - :vartype copy_log_details: list[~azure.mgmt.databox.models.CopyLogDetails] + :vartype copy_log_details: list[~data_box_management_client.models.CopyLogDetails] :ivar reverse_shipment_label_sas_key: Shared access key to download the return shipment label. :vartype reverse_shipment_label_sas_key: str :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. :vartype chain_of_custody_sas_key: str + :param key_encryption_key: Details about which key encryption type is being used. + :type key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_tera_bytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. + :type expected_data_size_in_tera_bytes: int + :ivar actions: Available actions on the job. + :vartype actions: list[str or ~data_box_management_client.models.CustomerResolutionCode] + :ivar last_mitigation_action_on_job: Last mitigation action performed on the job. + :vartype last_mitigation_action_on_job: + ~data_box_management_client.models.LastMitigationActionOnJob :ivar copy_progress: Copy progress per account. - :vartype copy_progress: list[~azure.mgmt.databox.models.CopyProgress] - :param device_password: Set Device password for unlocking Databox Heavy. + :vartype copy_progress: list[~data_box_management_client.models.CopyProgress] + :param device_password: Set Device password for unlocking Databox Heavy. Should not be passed + for TransferType:ExportFromAzure jobs. If this is not passed, the service will generate + password itself. This will not be returned in Get Call. Password Requirements : Password must + be minimum of 12 and maximum of 64 characters. Password must have at least one uppercase + alphabet, one number and one special character. Password cannot have the following characters : + IilLoO0 Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. 
:type device_password: str """ _validation = { 'job_stages': {'readonly': True}, 'contact_details': {'required': True}, - 'shipping_address': {'required': True}, 'delivery_package': {'readonly': True}, 'return_package': {'readonly': True}, - 'destination_account_details': {'required': True}, - 'error_details': {'readonly': True}, 'job_details_type': {'required': True}, 'copy_log_details': {'readonly': True}, 'reverse_shipment_label_sas_key': {'readonly': True}, 'chain_of_custody_sas_key': {'readonly': True}, + 'actions': {'readonly': True}, + 'last_mitigation_action_on_job': {'readonly': True}, 'copy_progress': {'readonly': True}, } _attribute_map = { - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, 'preferences': {'key': 'preferences', 'type': 'Preferences'}, 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_tera_bytes': {'key': 'expectedDataSizeInTeraBytes', 'type': 'int'}, + 'actions': {'key': 'actions', 'type': '[str]'}, + 'last_mitigation_action_on_job': {'key': 'lastMitigationActionOnJob', 'type': 'LastMitigationActionOnJob'}, 'copy_progress': {'key': 'copyProgress', 'type': '[CopyProgress]'}, 'device_password': {'key': 'devicePassword', 'type': 'str'}, } @@ -1247,21 +1480,26 @@ class DataBoxHeavyJobSecrets(JobSecrets): :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_secrets_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :param dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. - :type dc_access_security_code: ~azure.mgmt.databox.models.DcAccessSecurityCode + :type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. + :vartype error: ~data_box_management_client.models.CloudError :ivar cabinet_pod_secrets: Contains the list of secret objects for a databox heavy job. 
- :vartype cabinet_pod_secrets: list[~azure.mgmt.databox.models.DataBoxHeavySecret] + :vartype cabinet_pod_secrets: list[~data_box_management_client.models.DataBoxHeavySecret] """ _validation = { 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, 'cabinet_pod_secrets': {'readonly': True}, } _attribute_map = { 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'cabinet_pod_secrets': {'key': 'cabinetPodSecrets', 'type': '[DataBoxHeavySecret]'}, } @@ -1284,12 +1522,14 @@ class DataBoxHeavySecret(msrest.serialization.Model): :ivar device_password: Password for out of the box experience on device. :vartype device_password: str :ivar network_configurations: Network configuration of the appliance. - :vartype network_configurations: list[~azure.mgmt.databox.models.ApplianceNetworkConfiguration] + :vartype network_configurations: + list[~data_box_management_client.models.ApplianceNetworkConfiguration] :ivar encoded_validation_cert_pub_key: The base 64 encoded public key to authenticate with the device. :vartype encoded_validation_cert_pub_key: str :ivar account_credential_details: Per account level access credentials. - :vartype account_credential_details: list[~azure.mgmt.databox.models.AccountCredentialDetails] + :vartype account_credential_details: + list[~data_box_management_client.models.AccountCredentialDetails] """ _validation = { @@ -1327,69 +1567,83 @@ class DataBoxJobDetails(JobDetails): All required parameters must be populated in order to send to Azure. - :param expected_data_size_in_terabytes: The expected size of the data, which needs to be - transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int :ivar job_stages: List of stages that run in the job. - :vartype job_stages: list[~azure.mgmt.databox.models.JobStages] + :vartype job_stages: list[~data_box_management_client.models.JobStages] :param contact_details: Required. Contact details for notification and shipping. - :type contact_details: ~azure.mgmt.databox.models.ContactDetails - :param shipping_address: Required. Shipping address of the customer. - :type shipping_address: ~azure.mgmt.databox.models.ShippingAddress + :type contact_details: ~data_box_management_client.models.ContactDetails + :param shipping_address: Shipping address of the customer. + :type shipping_address: ~data_box_management_client.models.ShippingAddress :ivar delivery_package: Delivery package shipping details. - :vartype delivery_package: ~azure.mgmt.databox.models.PackageShippingDetails + :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails :ivar return_package: Return package shipping details. - :vartype return_package: ~azure.mgmt.databox.models.PackageShippingDetails - :param destination_account_details: Required. Destination account details. - :type destination_account_details: list[~azure.mgmt.databox.models.DestinationAccountDetails] - :ivar error_details: Error details for failure. This is optional. - :vartype error_details: list[~azure.mgmt.databox.models.JobErrorDetails] + :vartype return_package: ~data_box_management_client.models.PackageShippingDetails + :param data_import_details: Details of the data to be imported into azure. 
+ :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] :param job_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator + :type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator :param preferences: Preferences for the order. - :type preferences: ~azure.mgmt.databox.models.Preferences + :type preferences: ~data_box_management_client.models.Preferences :ivar copy_log_details: List of copy log details. - :vartype copy_log_details: list[~azure.mgmt.databox.models.CopyLogDetails] + :vartype copy_log_details: list[~data_box_management_client.models.CopyLogDetails] :ivar reverse_shipment_label_sas_key: Shared access key to download the return shipment label. :vartype reverse_shipment_label_sas_key: str :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. :vartype chain_of_custody_sas_key: str + :param key_encryption_key: Details about which key encryption type is being used. + :type key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_tera_bytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. + :type expected_data_size_in_tera_bytes: int + :ivar actions: Available actions on the job. + :vartype actions: list[str or ~data_box_management_client.models.CustomerResolutionCode] + :ivar last_mitigation_action_on_job: Last mitigation action performed on the job. + :vartype last_mitigation_action_on_job: + ~data_box_management_client.models.LastMitigationActionOnJob :ivar copy_progress: Copy progress per storage account. - :vartype copy_progress: list[~azure.mgmt.databox.models.CopyProgress] - :param device_password: Set Device password for unlocking Databox. + :vartype copy_progress: list[~data_box_management_client.models.CopyProgress] + :param device_password: Set Device password for unlocking Databox. Should not be passed for + TransferType:ExportFromAzure jobs. If this is not passed, the service will generate password + itself. This will not be returned in Get Call. Password Requirements : Password must be + minimum of 12 and maximum of 64 characters. Password must have at least one uppercase alphabet, + one number and one special character. Password cannot have the following characters : IilLoO0 + Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. 
:type device_password: str """ _validation = { 'job_stages': {'readonly': True}, 'contact_details': {'required': True}, - 'shipping_address': {'required': True}, 'delivery_package': {'readonly': True}, 'return_package': {'readonly': True}, - 'destination_account_details': {'required': True}, - 'error_details': {'readonly': True}, 'job_details_type': {'required': True}, 'copy_log_details': {'readonly': True}, 'reverse_shipment_label_sas_key': {'readonly': True}, 'chain_of_custody_sas_key': {'readonly': True}, + 'actions': {'readonly': True}, + 'last_mitigation_action_on_job': {'readonly': True}, 'copy_progress': {'readonly': True}, } _attribute_map = { - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, 'preferences': {'key': 'preferences', 'type': 'Preferences'}, 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_tera_bytes': {'key': 'expectedDataSizeInTeraBytes', 'type': 'int'}, + 'actions': {'key': 'actions', 'type': '[str]'}, + 'last_mitigation_action_on_job': {'key': 'lastMitigationActionOnJob', 'type': 'LastMitigationActionOnJob'}, 'copy_progress': {'key': 'copyProgress', 'type': '[CopyProgress]'}, 'device_password': {'key': 'devicePassword', 'type': 'str'}, } @@ -1407,24 +1661,31 @@ def __init__( class DataboxJobSecrets(JobSecrets): """The secrets related to a databox job. + Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_secrets_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :param dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. - :type dc_access_security_code: ~azure.mgmt.databox.models.DcAccessSecurityCode + :type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. + :vartype error: ~data_box_management_client.models.CloudError :param pod_secrets: Contains the list of secret objects for a job. 
- :type pod_secrets: list[~azure.mgmt.databox.models.DataBoxSecret] + :type pod_secrets: list[~data_box_management_client.models.DataBoxSecret] """ _validation = { 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, } _attribute_map = { 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'pod_secrets': {'key': 'podSecrets', 'type': '[DataBoxSecret]'}, } @@ -1445,13 +1706,14 @@ class ScheduleAvailabilityRequest(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param storage_location: Required. Location for data transfer. - For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api- - version=2018-01-01. + :param storage_location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. :type storage_location: str :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type sku_name: str or ~azure.mgmt.databox.models.SkuName + :type sku_name: str or ~data_box_management_client.models.SkuName + :param country: Country in which storage location should be supported. + :type country: str """ _validation = { @@ -1462,6 +1724,7 @@ class ScheduleAvailabilityRequest(msrest.serialization.Model): _attribute_map = { 'storage_location': {'key': 'storageLocation', 'type': 'str'}, 'sku_name': {'key': 'skuName', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, } _subtype_map = { @@ -1475,6 +1738,7 @@ def __init__( super(ScheduleAvailabilityRequest, self).__init__(**kwargs) self.storage_location = kwargs['storage_location'] self.sku_name = None # type: Optional[str] + self.country = kwargs.get('country', None) class DataBoxScheduleAvailabilityRequest(ScheduleAvailabilityRequest): @@ -1482,13 +1746,14 @@ class DataBoxScheduleAvailabilityRequest(ScheduleAvailabilityRequest): All required parameters must be populated in order to send to Azure. - :param storage_location: Required. Location for data transfer. - For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api- - version=2018-01-01. + :param storage_location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. :type storage_location: str :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type sku_name: str or ~azure.mgmt.databox.models.SkuName + :type sku_name: str or ~data_box_management_client.models.SkuName + :param country: Country in which storage location should be supported. + :type country: str """ _validation = { @@ -1499,6 +1764,7 @@ class DataBoxScheduleAvailabilityRequest(ScheduleAvailabilityRequest): _attribute_map = { 'storage_location': {'key': 'storageLocation', 'type': 'str'}, 'sku_name': {'key': 'skuName', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, } def __init__( @@ -1519,12 +1785,14 @@ class DataBoxSecret(msrest.serialization.Model): :ivar device_password: Password for out of the box experience on device. 
:vartype device_password: str :ivar network_configurations: Network configuration of the appliance. - :vartype network_configurations: list[~azure.mgmt.databox.models.ApplianceNetworkConfiguration] + :vartype network_configurations: + list[~data_box_management_client.models.ApplianceNetworkConfiguration] :ivar encoded_validation_cert_pub_key: The base 64 encoded public key to authenticate with the device. :vartype encoded_validation_cert_pub_key: str :ivar account_credential_details: Per account level access credentials. - :vartype account_credential_details: list[~azure.mgmt.databox.models.AccountCredentialDetails] + :vartype account_credential_details: + list[~data_box_management_client.models.AccountCredentialDetails] """ _validation = { @@ -1555,257 +1823,235 @@ def __init__( self.account_credential_details = None -class DataDestinationDetailsValidationRequest(ValidationInputRequest): - """Request to validate data destination details. +class DataExportDetails(msrest.serialization.Model): + """Details of the data to be used for exporting data from azure. All required parameters must be populated in order to send to Azure. - :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator - :param destination_account_details: Required. Destination account details list. - :type destination_account_details: list[~azure.mgmt.databox.models.DestinationAccountDetails] - :param location: Required. Location of stamp or geo. - :type location: str + :param transfer_configuration: Required. Configuration for the data transfer. + :type transfer_configuration: ~data_box_management_client.models.TransferConfiguration + :param log_collection_level: Level of the logs to be collected. Possible values include: + "Error", "Verbose". Default value: "Error". + :type log_collection_level: str or ~data_box_management_client.models.LogCollectionLevel + :param account_details: Required. Account details of the data to be transferred. 
+ :type account_details: ~data_box_management_client.models.DataAccountDetails """ _validation = { - 'validation_type': {'required': True}, - 'destination_account_details': {'required': True}, - 'location': {'required': True}, + 'transfer_configuration': {'required': True}, + 'account_details': {'required': True}, } _attribute_map = { - 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'location': {'key': 'location', 'type': 'str'}, + 'transfer_configuration': {'key': 'transferConfiguration', 'type': 'TransferConfiguration'}, + 'log_collection_level': {'key': 'logCollectionLevel', 'type': 'str'}, + 'account_details': {'key': 'accountDetails', 'type': 'DataAccountDetails'}, } def __init__( self, **kwargs ): - super(DataDestinationDetailsValidationRequest, self).__init__(**kwargs) - self.validation_type = 'ValidateDataDestinationDetails' # type: str - self.destination_account_details = kwargs['destination_account_details'] - self.location = kwargs['location'] + super(DataExportDetails, self).__init__(**kwargs) + self.transfer_configuration = kwargs['transfer_configuration'] + self.log_collection_level = kwargs.get('log_collection_level', "Error") + self.account_details = kwargs['account_details'] -class DataDestinationDetailsValidationResponseProperties(ValidationInputResponse): - """Properties of data destination details validation response. - - Variables are only populated by the server, and will be ignored when sending a request. +class DataImportDetails(msrest.serialization.Model): + """Details of the data to be used for importing data to azure. All required parameters must be populated in order to send to Azure. - :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator - :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error - :ivar status: Data destination details validation status. Possible values include: "Valid", - "Invalid", "Skipped". - :vartype status: str or ~azure.mgmt.databox.models.ValidationStatus + :param account_details: Required. Account details of the data to be transferred. + :type account_details: ~data_box_management_client.models.DataAccountDetails """ _validation = { - 'validation_type': {'required': True}, - 'error': {'readonly': True}, - 'status': {'readonly': True}, + 'account_details': {'required': True}, } _attribute_map = { - 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, - 'status': {'key': 'status', 'type': 'str'}, + 'account_details': {'key': 'accountDetails', 'type': 'DataAccountDetails'}, } def __init__( self, **kwargs ): - super(DataDestinationDetailsValidationResponseProperties, self).__init__(**kwargs) - self.validation_type = 'ValidateDataDestinationDetails' # type: str - self.status = None + super(DataImportDetails, self).__init__(**kwargs) + self.account_details = kwargs['account_details'] -class DcAccessSecurityCode(msrest.serialization.Model): - """Dc Access Security code for device. 
+class DataLocationToServiceLocationMap(msrest.serialization.Model): + """Map of data location to service location. - :param forward_dc_access_code: Dc Access Code for dispatching from DC. - :type forward_dc_access_code: str - :param reverse_dc_access_code: Dc Access code for dropping off at DC. - :type reverse_dc_access_code: str + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar data_location: Location of the data. + :vartype data_location: str + :ivar service_location: Location of the service. + :vartype service_location: str """ + _validation = { + 'data_location': {'readonly': True}, + 'service_location': {'readonly': True}, + } + _attribute_map = { - 'forward_dc_access_code': {'key': 'forwardDcAccessCode', 'type': 'str'}, - 'reverse_dc_access_code': {'key': 'reverseDcAccessCode', 'type': 'str'}, + 'data_location': {'key': 'dataLocation', 'type': 'str'}, + 'service_location': {'key': 'serviceLocation', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DcAccessSecurityCode, self).__init__(**kwargs) - self.forward_dc_access_code = kwargs.get('forward_dc_access_code', None) - self.reverse_dc_access_code = kwargs.get('reverse_dc_access_code', None) - + super(DataLocationToServiceLocationMap, self).__init__(**kwargs) + self.data_location = None + self.service_location = None -class DestinationAccountDetails(msrest.serialization.Model): - """Details of the destination storage accounts. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DestinationManagedDiskDetails, DestinationStorageAccountDetails. +class DataTransferDetailsValidationRequest(ValidationInputRequest): + """Request to validate export and import data details. All required parameters must be populated in order to send to Azure. - :param data_destination_type: Required. Data Destination Type.Constant filled by server. - Possible values include: "StorageAccount", "ManagedDisk". - :type data_destination_type: str or ~azure.mgmt.databox.models.DataDestinationType - :param account_id: Arm Id of the destination where the data has to be moved. - :type account_id: str - :param share_password: Share password to be shared by all shares in SA. - :type share_password: str + :param validation_type: Required. Identifies the type of validation request.Constant filled by + server. Possible values include: "ValidateAddress", + "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + :param data_export_details: List of DataTransfer details to be used to export data from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] + :param data_import_details: List of DataTransfer details to be used to import data to azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param device_type: Required. Device type. Possible values include: "DataBox", "DataBoxDisk", + "DataBoxHeavy". + :type device_type: str or ~data_box_management_client.models.SkuName + :param transfer_type: Required. Type of the transfer. Possible values include: "ImportToAzure", + "ExportFromAzure". 
+ :type transfer_type: str or ~data_box_management_client.models.TransferType """ _validation = { - 'data_destination_type': {'required': True}, + 'validation_type': {'required': True}, + 'device_type': {'required': True}, + 'transfer_type': {'required': True}, } _attribute_map = { - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, - 'account_id': {'key': 'accountId', 'type': 'str'}, - 'share_password': {'key': 'sharePassword', 'type': 'str'}, - } - - _subtype_map = { - 'data_destination_type': {'ManagedDisk': 'DestinationManagedDiskDetails', 'StorageAccount': 'DestinationStorageAccountDetails'} + 'validation_type': {'key': 'validationType', 'type': 'str'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'device_type': {'key': 'deviceType', 'type': 'str'}, + 'transfer_type': {'key': 'transferType', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DestinationAccountDetails, self).__init__(**kwargs) - self.data_destination_type = None # type: Optional[str] - self.account_id = kwargs.get('account_id', None) - self.share_password = kwargs.get('share_password', None) + super(DataTransferDetailsValidationRequest, self).__init__(**kwargs) + self.validation_type = 'ValidateDataTransferDetails' # type: str + self.data_export_details = kwargs.get('data_export_details', None) + self.data_import_details = kwargs.get('data_import_details', None) + self.device_type = kwargs['device_type'] + self.transfer_type = kwargs['transfer_type'] -class DestinationManagedDiskDetails(DestinationAccountDetails): - """Details for the destination compute disks. +class DataTransferDetailsValidationResponseProperties(ValidationInputResponse): + """Properties of data transfer details validation response. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param data_destination_type: Required. Data Destination Type.Constant filled by server. - Possible values include: "StorageAccount", "ManagedDisk". - :type data_destination_type: str or ~azure.mgmt.databox.models.DataDestinationType - :param account_id: Arm Id of the destination where the data has to be moved. - :type account_id: str - :param share_password: Share password to be shared by all shares in SA. - :type share_password: str - :param resource_group_id: Required. Destination Resource Group Id where the Compute disks - should be created. - :type resource_group_id: str - :param staging_storage_account_id: Required. Arm Id of the storage account that can be used to - copy the vhd for staging. - :type staging_storage_account_id: str + :param validation_type: Required. Identifies the type of validation response.Constant filled by + server. Possible values include: "ValidateAddress", + "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + :ivar error: Error code and message of validation response. + :vartype error: ~data_box_management_client.models.CloudError + :ivar status: Data transfer details validation status. Possible values include: "Valid", + "Invalid", "Skipped". 
+ :vartype status: str or ~data_box_management_client.models.ValidationStatus """ _validation = { - 'data_destination_type': {'required': True}, - 'resource_group_id': {'required': True}, - 'staging_storage_account_id': {'required': True}, + 'validation_type': {'required': True}, + 'error': {'readonly': True}, + 'status': {'readonly': True}, } _attribute_map = { - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, - 'account_id': {'key': 'accountId', 'type': 'str'}, - 'share_password': {'key': 'sharePassword', 'type': 'str'}, - 'resource_group_id': {'key': 'resourceGroupId', 'type': 'str'}, - 'staging_storage_account_id': {'key': 'stagingStorageAccountId', 'type': 'str'}, + 'validation_type': {'key': 'validationType', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'CloudError'}, + 'status': {'key': 'status', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DestinationManagedDiskDetails, self).__init__(**kwargs) - self.data_destination_type = 'ManagedDisk' # type: str - self.resource_group_id = kwargs['resource_group_id'] - self.staging_storage_account_id = kwargs['staging_storage_account_id'] - + super(DataTransferDetailsValidationResponseProperties, self).__init__(**kwargs) + self.validation_type = 'ValidateDataTransferDetails' # type: str + self.status = None -class DestinationStorageAccountDetails(DestinationAccountDetails): - """Details for the destination storage account. - All required parameters must be populated in order to send to Azure. +class DcAccessSecurityCode(msrest.serialization.Model): + """Dc access security code. - :param data_destination_type: Required. Data Destination Type.Constant filled by server. - Possible values include: "StorageAccount", "ManagedDisk". - :type data_destination_type: str or ~azure.mgmt.databox.models.DataDestinationType - :param account_id: Arm Id of the destination where the data has to be moved. - :type account_id: str - :param share_password: Share password to be shared by all shares in SA. - :type share_password: str - :param storage_account_id: Required. Destination Storage Account Arm Id. - :type storage_account_id: str + :param reverse_dc_access_code: Reverse Dc access security code. + :type reverse_dc_access_code: str + :param forward_dc_access_code: Forward Dc access security code. + :type forward_dc_access_code: str """ - _validation = { - 'data_destination_type': {'required': True}, - 'storage_account_id': {'required': True}, - } - _attribute_map = { - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, - 'account_id': {'key': 'accountId', 'type': 'str'}, - 'share_password': {'key': 'sharePassword', 'type': 'str'}, - 'storage_account_id': {'key': 'storageAccountId', 'type': 'str'}, + 'reverse_dc_access_code': {'key': 'reverseDCAccessCode', 'type': 'str'}, + 'forward_dc_access_code': {'key': 'forwardDCAccessCode', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DestinationStorageAccountDetails, self).__init__(**kwargs) - self.data_destination_type = 'StorageAccount' # type: str - self.storage_account_id = kwargs['storage_account_id'] + super(DcAccessSecurityCode, self).__init__(**kwargs) + self.reverse_dc_access_code = kwargs.get('reverse_dc_access_code', None) + self.forward_dc_access_code = kwargs.get('forward_dc_access_code', None) -class DestinationToServiceLocationMap(msrest.serialization.Model): - """Map of destination location to service location. +class Details(msrest.serialization.Model): + """Details. 
- Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar destination_location: Location of the destination. - :vartype destination_location: str - :ivar service_location: Location of the service. - :vartype service_location: str + :param code: Required. + :type code: str + :param message: Required. + :type message: str """ _validation = { - 'destination_location': {'readonly': True}, - 'service_location': {'readonly': True}, + 'code': {'required': True}, + 'message': {'required': True}, } _attribute_map = { - 'destination_location': {'key': 'destinationLocation', 'type': 'str'}, - 'service_location': {'key': 'serviceLocation', 'type': 'str'}, + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DestinationToServiceLocationMap, self).__init__(**kwargs) - self.destination_location = None - self.service_location = None + super(Details, self).__init__(**kwargs) + self.code = kwargs['code'] + self.message = kwargs['message'] class DiskScheduleAvailabilityRequest(ScheduleAvailabilityRequest): @@ -1813,28 +2059,30 @@ class DiskScheduleAvailabilityRequest(ScheduleAvailabilityRequest): All required parameters must be populated in order to send to Azure. - :param storage_location: Required. Location for data transfer. - For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api- - version=2018-01-01. + :param storage_location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. :type storage_location: str :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type sku_name: str or ~azure.mgmt.databox.models.SkuName - :param expected_data_size_in_terabytes: Required. The expected size of the data, which needs to - be transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int + :type sku_name: str or ~data_box_management_client.models.SkuName + :param country: Country in which storage location should be supported. + :type country: str + :param expected_data_size_in_tera_bytes: Required. The expected size of the data, which needs + to be transferred in this job, in terabytes. 
+ :type expected_data_size_in_tera_bytes: int """ _validation = { 'storage_location': {'required': True}, 'sku_name': {'required': True}, - 'expected_data_size_in_terabytes': {'required': True}, + 'expected_data_size_in_tera_bytes': {'required': True}, } _attribute_map = { 'storage_location': {'key': 'storageLocation', 'type': 'str'}, 'sku_name': {'key': 'skuName', 'type': 'str'}, - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, + 'country': {'key': 'country', 'type': 'str'}, + 'expected_data_size_in_tera_bytes': {'key': 'expectedDataSizeInTeraBytes', 'type': 'int'}, } def __init__( @@ -1843,7 +2091,7 @@ def __init__( ): super(DiskScheduleAvailabilityRequest, self).__init__(**kwargs) self.sku_name = 'DataBoxDisk' # type: str - self.expected_data_size_in_terabytes = kwargs['expected_data_size_in_terabytes'] + self.expected_data_size_in_tera_bytes = kwargs['expected_data_size_in_tera_bytes'] class DiskSecret(msrest.serialization.Model): @@ -1877,48 +2125,109 @@ def __init__( self.bit_locker_key = None -class Error(msrest.serialization.Model): - """Top level error for the job. - - Variables are only populated by the server, and will be ignored when sending a request. +class EncryptionPreferences(msrest.serialization.Model): + """Preferences related to the Encryption. - :ivar code: Error code that can be used to programmatically identify the error. - :vartype code: str - :ivar message: Describes the error in detail and provides debugging information. - :vartype message: str + :param double_encryption: Defines secondary layer of software-based encryption enablement. + Possible values include: "Enabled", "Disabled". Default value: "Disabled". + :type double_encryption: str or ~data_box_management_client.models.DoubleEncryption """ - _validation = { - 'code': {'readonly': True}, - 'message': {'readonly': True}, - } - _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, + 'double_encryption': {'key': 'doubleEncryption', 'type': 'str'}, } def __init__( self, **kwargs ): - super(Error, self).__init__(**kwargs) - self.code = None - self.message = None + super(EncryptionPreferences, self).__init__(**kwargs) + self.double_encryption = kwargs.get('double_encryption', "Disabled") -class HeavyScheduleAvailabilityRequest(ScheduleAvailabilityRequest): - """Request body to get the availability for scheduling heavy orders. +class ErrorDetail(msrest.serialization.Model): + """ErrorDetail. All required parameters must be populated in order to send to Azure. - :param storage_location: Required. Location for data transfer. - For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api- - version=2018-01-01. + :param code: Required. + :type code: str + :param message: Required. 
+ :type message: str + :param details: + :type details: list[~data_box_management_client.models.Details] + :param target: + :type target: str + """ + + _validation = { + 'code': {'required': True}, + 'message': {'required': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[Details]'}, + 'target': {'key': 'target', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorDetail, self).__init__(**kwargs) + self.code = kwargs['code'] + self.message = kwargs['message'] + self.details = kwargs.get('details', None) + self.target = kwargs.get('target', None) + + +class FilterFileDetails(msrest.serialization.Model): + """Details of the filter files to be used for data transfer. + + All required parameters must be populated in order to send to Azure. + + :param filter_file_type: Required. Type of the filter file. Possible values include: + "AzureBlob", "AzureFile". + :type filter_file_type: str or ~data_box_management_client.models.FilterFileType + :param filter_file_path: Required. Path of the file that contains the details of all items to + transfer. + :type filter_file_path: str + """ + + _validation = { + 'filter_file_type': {'required': True}, + 'filter_file_path': {'required': True}, + } + + _attribute_map = { + 'filter_file_type': {'key': 'filterFileType', 'type': 'str'}, + 'filter_file_path': {'key': 'filterFilePath', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(FilterFileDetails, self).__init__(**kwargs) + self.filter_file_type = kwargs['filter_file_type'] + self.filter_file_path = kwargs['filter_file_path'] + + +class HeavyScheduleAvailabilityRequest(ScheduleAvailabilityRequest): + """Request body to get the availability for scheduling heavy orders. + + All required parameters must be populated in order to send to Azure. + + :param storage_location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. :type storage_location: str :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type sku_name: str or ~azure.mgmt.databox.models.SkuName + :type sku_name: str or ~data_box_management_client.models.SkuName + :param country: Country in which storage location should be supported. + :type country: str """ _validation = { @@ -1929,6 +2238,7 @@ class HeavyScheduleAvailabilityRequest(ScheduleAvailabilityRequest): _attribute_map = { 'storage_location': {'key': 'storageLocation', 'type': 'str'}, 'sku_name': {'key': 'skuName', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, } def __init__( @@ -1939,63 +2249,46 @@ def __init__( self.sku_name = 'DataBoxHeavy' # type: str -class JobDeliveryInfo(msrest.serialization.Model): - """Additional delivery info. +class IdentityProperties(msrest.serialization.Model): + """Managed identity properties. - :param scheduled_date_time: Scheduled date time. - :type scheduled_date_time: ~datetime.datetime + :param type: Managed service identity type. + :type type: str + :param user_assigned: User assigned identity properties. 
+ :type user_assigned: ~data_box_management_client.models.UserAssignedProperties """ _attribute_map = { - 'scheduled_date_time': {'key': 'scheduledDateTime', 'type': 'iso-8601'}, + 'type': {'key': 'type', 'type': 'str'}, + 'user_assigned': {'key': 'userAssigned', 'type': 'UserAssignedProperties'}, } def __init__( self, **kwargs ): - super(JobDeliveryInfo, self).__init__(**kwargs) - self.scheduled_date_time = kwargs.get('scheduled_date_time', None) + super(IdentityProperties, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + self.user_assigned = kwargs.get('user_assigned', None) -class JobErrorDetails(msrest.serialization.Model): - """Job Error Details for providing the information and recommended action. - - Variables are only populated by the server, and will be ignored when sending a request. +class JobDeliveryInfo(msrest.serialization.Model): + """Additional delivery info. - :ivar error_message: Message for the error. - :vartype error_message: str - :ivar error_code: Code for the error. - :vartype error_code: int - :ivar recommended_action: Recommended action for the error. - :vartype recommended_action: str - :ivar exception_message: Contains the non localized exception message. - :vartype exception_message: str + :param scheduled_date_time: Scheduled date time. + :type scheduled_date_time: ~datetime.datetime """ - _validation = { - 'error_message': {'readonly': True}, - 'error_code': {'readonly': True}, - 'recommended_action': {'readonly': True}, - 'exception_message': {'readonly': True}, - } - _attribute_map = { - 'error_message': {'key': 'errorMessage', 'type': 'str'}, - 'error_code': {'key': 'errorCode', 'type': 'int'}, - 'recommended_action': {'key': 'recommendedAction', 'type': 'str'}, - 'exception_message': {'key': 'exceptionMessage', 'type': 'str'}, + 'scheduled_date_time': {'key': 'scheduledDateTime', 'type': 'iso-8601'}, } def __init__( self, **kwargs ): - super(JobErrorDetails, self).__init__(**kwargs) - self.error_message = None - self.error_code = None - self.recommended_action = None - self.exception_message = None + super(JobDeliveryInfo, self).__init__(**kwargs) + self.scheduled_date_time = kwargs.get('scheduled_date_time', None) class Resource(msrest.serialization.Model): @@ -2012,7 +2305,9 @@ class Resource(msrest.serialization.Model): can be used in viewing and grouping this resource (across resource groups). :type tags: dict[str, str] :param sku: Required. The sku type. - :type sku: ~azure.mgmt.databox.models.Sku + :type sku: ~data_box_management_client.models.Sku + :param identity: Msi identity of the resource. + :type identity: ~data_box_management_client.models.ResourceIdentity """ _validation = { @@ -2024,6 +2319,7 @@ class Resource(msrest.serialization.Model): 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'sku': {'key': 'sku', 'type': 'Sku'}, + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, } def __init__( @@ -2034,6 +2330,7 @@ def __init__( self.location = kwargs['location'] self.tags = kwargs.get('tags', None) self.sku = kwargs['sku'] + self.identity = kwargs.get('identity', None) class JobResource(Resource): @@ -2052,38 +2349,47 @@ class JobResource(Resource): can be used in viewing and grouping this resource (across resource groups). :type tags: dict[str, str] :param sku: Required. The sku type. - :type sku: ~azure.mgmt.databox.models.Sku + :type sku: ~data_box_management_client.models.Sku + :param identity: Msi identity of the resource. 
+ :type identity: ~data_box_management_client.models.ResourceIdentity :ivar name: Name of the object. :vartype name: str :ivar id: Id of the object. :vartype id: str :ivar type: Type of the object. :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~data_box_management_client.models.SystemData + :param transfer_type: Required. Type of the data transfer. Possible values include: + "ImportToAzure", "ExportFromAzure". + :type transfer_type: str or ~data_box_management_client.models.TransferType :ivar is_cancellable: Describes whether the job is cancellable or not. :vartype is_cancellable: bool :ivar is_deletable: Describes whether the job is deletable or not. :vartype is_deletable: bool :ivar is_shipping_address_editable: Describes whether the shipping address is editable or not. :vartype is_shipping_address_editable: bool + :ivar is_prepare_to_ship_enabled: Is Prepare To Ship Enabled on this job. + :vartype is_prepare_to_ship_enabled: bool :ivar status: Name of the stage which is in progress. Possible values include: "DeviceOrdered", "DevicePrepared", "Dispatched", "Delivered", "PickedUp", "AtAzureDC", "DataCopy", "Completed", "CompletedWithErrors", "Cancelled", "Failed_IssueReportedAtCustomer", "Failed_IssueDetectedAtAzureDC", "Aborted", "CompletedWithWarnings", "ReadyToDispatchFromAzureDC", "ReadyToReceiveAtAzureDC". - :vartype status: str or ~azure.mgmt.databox.models.StageName + :vartype status: str or ~data_box_management_client.models.StageName :ivar start_time: Time at which the job was started in UTC ISO 8601 format. :vartype start_time: ~datetime.datetime :ivar error: Top level error for the job. - :vartype error: ~azure.mgmt.databox.models.Error + :vartype error: ~data_box_management_client.models.CloudError :param details: Details of a job run. This field will only be sent for expand details filter. - :type details: ~azure.mgmt.databox.models.JobDetails + :type details: ~data_box_management_client.models.JobDetails :ivar cancellation_reason: Reason for cancellation. :vartype cancellation_reason: str :param delivery_type: Delivery type of Job. Possible values include: "NonScheduled", - "Scheduled". - :type delivery_type: str or ~azure.mgmt.databox.models.JobDeliveryType + "Scheduled". Default value: "NonScheduled". + :type delivery_type: str or ~data_box_management_client.models.JobDeliveryType :param delivery_info: Delivery Info of Job. - :type delivery_info: ~azure.mgmt.databox.models.JobDeliveryInfo + :type delivery_info: ~data_box_management_client.models.JobDeliveryInfo :ivar is_cancellable_without_fee: Flag to indicate cancellation of scheduled job. 
:vartype is_cancellable_without_fee: bool """ @@ -2094,9 +2400,12 @@ class JobResource(Resource): 'name': {'readonly': True}, 'id': {'readonly': True}, 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'transfer_type': {'required': True}, 'is_cancellable': {'readonly': True}, 'is_deletable': {'readonly': True}, 'is_shipping_address_editable': {'readonly': True}, + 'is_prepare_to_ship_enabled': {'readonly': True}, 'status': {'readonly': True}, 'start_time': {'readonly': True}, 'error': {'readonly': True}, @@ -2108,15 +2417,19 @@ class JobResource(Resource): 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'sku': {'key': 'sku', 'type': 'Sku'}, + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, 'name': {'key': 'name', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'transfer_type': {'key': 'properties.transferType', 'type': 'str'}, 'is_cancellable': {'key': 'properties.isCancellable', 'type': 'bool'}, 'is_deletable': {'key': 'properties.isDeletable', 'type': 'bool'}, 'is_shipping_address_editable': {'key': 'properties.isShippingAddressEditable', 'type': 'bool'}, + 'is_prepare_to_ship_enabled': {'key': 'properties.isPrepareToShipEnabled', 'type': 'bool'}, 'status': {'key': 'properties.status', 'type': 'str'}, 'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'}, - 'error': {'key': 'properties.error', 'type': 'Error'}, + 'error': {'key': 'properties.error', 'type': 'CloudError'}, 'details': {'key': 'properties.details', 'type': 'JobDetails'}, 'cancellation_reason': {'key': 'properties.cancellationReason', 'type': 'str'}, 'delivery_type': {'key': 'properties.deliveryType', 'type': 'str'}, @@ -2132,15 +2445,18 @@ def __init__( self.name = None self.id = None self.type = None + self.system_data = None + self.transfer_type = kwargs['transfer_type'] self.is_cancellable = None self.is_deletable = None self.is_shipping_address_editable = None + self.is_prepare_to_ship_enabled = None self.status = None self.start_time = None self.error = None self.details = kwargs.get('details', None) self.cancellation_reason = None - self.delivery_type = kwargs.get('delivery_type', None) + self.delivery_type = kwargs.get('delivery_type', "NonScheduled") self.delivery_info = kwargs.get('delivery_info', None) self.is_cancellable_without_fee = None @@ -2149,7 +2465,7 @@ class JobResourceList(msrest.serialization.Model): """Job Resource Collection. :param value: List of job resources. - :type value: list[~azure.mgmt.databox.models.JobResource] + :type value: list[~data_box_management_client.models.JobResource] :param next_link: Link for the next set of job resources. :type next_link: str """ @@ -2174,16 +2490,16 @@ class JobResourceUpdateParameter(msrest.serialization.Model): :param tags: A set of tags. The list of key value pairs that describe the resource. These tags can be used in viewing and grouping this resource (across resource groups). :type tags: dict[str, str] + :param identity: Msi identity of the resource. + :type identity: ~data_box_management_client.models.ResourceIdentity :param details: Details of a job to be updated. - :type details: ~azure.mgmt.databox.models.UpdateJobDetails - :param destination_account_details: Destination account details. 
- :type destination_account_details: list[~azure.mgmt.databox.models.DestinationAccountDetails] + :type details: ~data_box_management_client.models.UpdateJobDetails """ _attribute_map = { 'tags': {'key': 'tags', 'type': '{str}'}, + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, 'details': {'key': 'properties.details', 'type': 'UpdateJobDetails'}, - 'destination_account_details': {'key': 'properties.destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, } def __init__( @@ -2192,8 +2508,8 @@ def __init__( ): super(JobResourceUpdateParameter, self).__init__(**kwargs) self.tags = kwargs.get('tags', None) + self.identity = kwargs.get('identity', None) self.details = kwargs.get('details', None) - self.destination_account_details = kwargs.get('destination_account_details', None) class JobStages(msrest.serialization.Model): @@ -2206,18 +2522,17 @@ class JobStages(msrest.serialization.Model): "CompletedWithErrors", "Cancelled", "Failed_IssueReportedAtCustomer", "Failed_IssueDetectedAtAzureDC", "Aborted", "CompletedWithWarnings", "ReadyToDispatchFromAzureDC", "ReadyToReceiveAtAzureDC". - :vartype stage_name: str or ~azure.mgmt.databox.models.StageName + :vartype stage_name: str or ~data_box_management_client.models.StageName :ivar display_name: Display name of the job stage. :vartype display_name: str :ivar stage_status: Status of the job stage. Possible values include: "None", "InProgress", - "Succeeded", "Failed", "Cancelled", "Cancelling", "SucceededWithErrors". - :vartype stage_status: str or ~azure.mgmt.databox.models.StageStatus + "Succeeded", "Failed", "Cancelled", "Cancelling", "SucceededWithErrors", + "WaitingForCustomerAction", "SucceededWithWarnings". + :vartype stage_status: str or ~data_box_management_client.models.StageStatus :ivar stage_time: Time for the job stage in UTC ISO 8601 format. :vartype stage_time: ~datetime.datetime :ivar job_stage_details: Job Stage Details. :vartype job_stage_details: object - :ivar error_details: Error details for the stage. - :vartype error_details: list[~azure.mgmt.databox.models.JobErrorDetails] """ _validation = { @@ -2226,7 +2541,6 @@ class JobStages(msrest.serialization.Model): 'stage_status': {'readonly': True}, 'stage_time': {'readonly': True}, 'job_stage_details': {'readonly': True}, - 'error_details': {'readonly': True}, } _attribute_map = { @@ -2235,7 +2549,6 @@ class JobStages(msrest.serialization.Model): 'stage_status': {'key': 'stageStatus', 'type': 'str'}, 'stage_time': {'key': 'stageTime', 'type': 'iso-8601'}, 'job_stage_details': {'key': 'jobStageDetails', 'type': 'object'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, } def __init__( @@ -2248,7 +2561,146 @@ def __init__( self.stage_status = None self.stage_time = None self.job_stage_details = None - self.error_details = None + + +class KeyEncryptionKey(msrest.serialization.Model): + """Encryption key containing details about key to encrypt different keys. + + All required parameters must be populated in order to send to Azure. + + :param kek_type: Required. Type of encryption key used for key encryption. Possible values + include: "MicrosoftManaged", "CustomerManaged". Default value: "MicrosoftManaged". + :type kek_type: str or ~data_box_management_client.models.KekType + :param identity_properties: Managed identity properties used for key encryption. + :type identity_properties: ~data_box_management_client.models.IdentityProperties + :param kek_url: Key encryption key. It is required in case of Customer managed KekType. 
+ :type kek_url: str + :param kek_vault_resource_id: Kek vault resource id. It is required in case of Customer managed + KekType. + :type kek_vault_resource_id: str + """ + + _validation = { + 'kek_type': {'required': True}, + } + + _attribute_map = { + 'kek_type': {'key': 'kekType', 'type': 'str'}, + 'identity_properties': {'key': 'identityProperties', 'type': 'IdentityProperties'}, + 'kek_url': {'key': 'kekUrl', 'type': 'str'}, + 'kek_vault_resource_id': {'key': 'kekVaultResourceID', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(KeyEncryptionKey, self).__init__(**kwargs) + self.kek_type = kwargs.get('kek_type', "MicrosoftManaged") + self.identity_properties = kwargs.get('identity_properties', None) + self.kek_url = kwargs.get('kek_url', None) + self.kek_vault_resource_id = kwargs.get('kek_vault_resource_id', None) + + +class LastMitigationActionOnJob(msrest.serialization.Model): + """Last Mitigation Action Performed On Job. + + :param action_date_time_in_utc: Action performed date time. + :type action_date_time_in_utc: ~datetime.datetime + :param is_performed_by_customer: Action performed by customer, + possibility is that mitigation might happen by customer or service or by ops. + :type is_performed_by_customer: bool + :param customer_resolution: Resolution code provided by customer. Possible values include: + "None", "MoveToCleanUpDevice", "Resume". + :type customer_resolution: str or ~data_box_management_client.models.CustomerResolutionCode + """ + + _attribute_map = { + 'action_date_time_in_utc': {'key': 'actionDateTimeInUtc', 'type': 'iso-8601'}, + 'is_performed_by_customer': {'key': 'isPerformedByCustomer', 'type': 'bool'}, + 'customer_resolution': {'key': 'customerResolution', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(LastMitigationActionOnJob, self).__init__(**kwargs) + self.action_date_time_in_utc = kwargs.get('action_date_time_in_utc', None) + self.is_performed_by_customer = kwargs.get('is_performed_by_customer', None) + self.customer_resolution = kwargs.get('customer_resolution', None) + + +class ManagedDiskDetails(DataAccountDetails): + """Details of the managed disks. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Account Type of the data to be transferred.Constant filled + by server. Possible values include: "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param share_password: Password for all the shares to be created on the device. Should not be + passed for TransferType:ExportFromAzure jobs. If this is not passed, the service will generate + password itself. This will not be returned in Get Call. Password Requirements : Password must + be minimum of 12 and maximum of 64 characters. Password must have at least one uppercase + alphabet, one number and one special character. Password cannot have the following characters : + IilLoO0 Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. + :type share_password: str + :param resource_group_id: Required. Resource Group Id of the compute disks. + :type resource_group_id: str + :param staging_storage_account_id: Required. Resource Id of the storage account that can be + used to copy the vhd for staging. 
+ :type staging_storage_account_id: str + """ + + _validation = { + 'data_account_type': {'required': True}, + 'resource_group_id': {'required': True}, + 'staging_storage_account_id': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'share_password': {'key': 'sharePassword', 'type': 'str'}, + 'resource_group_id': {'key': 'resourceGroupId', 'type': 'str'}, + 'staging_storage_account_id': {'key': 'stagingStorageAccountId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ManagedDiskDetails, self).__init__(**kwargs) + self.data_account_type = 'ManagedDisk' # type: str + self.resource_group_id = kwargs['resource_group_id'] + self.staging_storage_account_id = kwargs['staging_storage_account_id'] + + +class MitigateJobRequest(msrest.serialization.Model): + """The Mitigate Job captured from request body for Mitigate API. + + All required parameters must be populated in order to send to Azure. + + :param customer_resolution_code: Required. Resolution code for the job. Possible values + include: "None", "MoveToCleanUpDevice", "Resume". + :type customer_resolution_code: str or + ~data_box_management_client.models.CustomerResolutionCode + """ + + _validation = { + 'customer_resolution_code': {'required': True}, + } + + _attribute_map = { + 'customer_resolution_code': {'key': 'customerResolutionCode', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(MitigateJobRequest, self).__init__(**kwargs) + self.customer_resolution_code = kwargs['customer_resolution_code'] class NotificationPreference(msrest.serialization.Model): @@ -2258,7 +2710,7 @@ class NotificationPreference(msrest.serialization.Model): :param stage_name: Required. Name of the stage. Possible values include: "DevicePrepared", "Dispatched", "Delivered", "PickedUp", "AtAzureDC", "DataCopy". - :type stage_name: str or ~azure.mgmt.databox.models.NotificationStageName + :type stage_name: str or ~data_box_management_client.models.NotificationStageName :param send_notification: Required. Notification is required or not. :type send_notification: bool """ @@ -2279,7 +2731,7 @@ def __init__( ): super(NotificationPreference, self).__init__(**kwargs) self.stage_name = kwargs['stage_name'] - self.send_notification = kwargs['send_notification'] + self.send_notification = kwargs.get('send_notification', True) class Operation(msrest.serialization.Model): @@ -2291,11 +2743,13 @@ class Operation(msrest.serialization.Model): {resourceProviderNamespace}/{resourceType}/{read|write|delete|action}. :vartype name: str :ivar display: Operation display values. - :vartype display: ~azure.mgmt.databox.models.OperationDisplay + :vartype display: ~data_box_management_client.models.OperationDisplay :ivar properties: Operation properties. :vartype properties: object :ivar origin: Origin of the operation. Can be : user|system|user,system. :vartype origin: str + :param is_data_action: Indicates whether the operation is a data action. 
+ :type is_data_action: bool """ _validation = { @@ -2310,6 +2764,7 @@ class Operation(msrest.serialization.Model): 'display': {'key': 'display', 'type': 'OperationDisplay'}, 'properties': {'key': 'properties', 'type': 'object'}, 'origin': {'key': 'origin', 'type': 'str'}, + 'is_data_action': {'key': 'isDataAction', 'type': 'bool'}, } def __init__( @@ -2321,6 +2776,7 @@ def __init__( self.display = None self.properties = None self.origin = None + self.is_data_action = kwargs.get('is_data_action', None) class OperationDisplay(msrest.serialization.Model): @@ -2360,7 +2816,7 @@ class OperationList(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. :ivar value: List of operations. - :vartype value: list[~azure.mgmt.databox.models.Operation] + :vartype value: list[~data_box_management_client.models.Operation] :param next_link: Link for the next set of operations. :type next_link: str """ @@ -2421,15 +2877,18 @@ def __init__( class Preferences(msrest.serialization.Model): """Preferences related to the order. - :param preferred_data_center_region: Preferred Data Center Region. + :param preferred_data_center_region: Preferred data center region. :type preferred_data_center_region: list[str] :param transport_preferences: Preferences related to the shipment logistics of the sku. - :type transport_preferences: ~azure.mgmt.databox.models.TransportPreferences + :type transport_preferences: ~data_box_management_client.models.TransportPreferences + :param encryption_preferences: Preferences related to the Encryption. + :type encryption_preferences: ~data_box_management_client.models.EncryptionPreferences """ _attribute_map = { 'preferred_data_center_region': {'key': 'preferredDataCenterRegion', 'type': '[str]'}, 'transport_preferences': {'key': 'transportPreferences', 'type': 'TransportPreferences'}, + 'encryption_preferences': {'key': 'encryptionPreferences', 'type': 'EncryptionPreferences'}, } def __init__( @@ -2439,6 +2898,7 @@ def __init__( super(Preferences, self).__init__(**kwargs) self.preferred_data_center_region = kwargs.get('preferred_data_center_region', None) self.transport_preferences = kwargs.get('transport_preferences', None) + self.encryption_preferences = kwargs.get('encryption_preferences', None) class PreferencesValidationRequest(ValidationInputRequest): @@ -2447,15 +2907,15 @@ class PreferencesValidationRequest(ValidationInputRequest): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator - :param preference: Preference requested with respect to transport type and data center. - :type preference: ~azure.mgmt.databox.models.Preferences + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + :param preference: Preference of transport and data center. + :type preference: ~data_box_management_client.models.Preferences :param device_type: Required. Device type to be used for the job. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". 
- :type device_type: str or ~azure.mgmt.databox.models.SkuName + :type device_type: str or ~data_box_management_client.models.SkuName """ _validation = { @@ -2487,15 +2947,15 @@ class PreferencesValidationResponseProperties(ValidationInputResponse): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar status: Validation status of requested data center and transport. Possible values include: "Valid", "Invalid", "Skipped". - :vartype status: str or ~azure.mgmt.databox.models.ValidationStatus + :vartype status: str or ~data_box_management_client.models.ValidationStatus """ _validation = { @@ -2506,7 +2966,7 @@ class PreferencesValidationResponseProperties(ValidationInputResponse): _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'status': {'key': 'status', 'type': 'str'}, } @@ -2524,10 +2984,12 @@ class RegionConfigurationRequest(msrest.serialization.Model): :param schedule_availability_request: Request body to get the availability for scheduling orders. - :type schedule_availability_request: ~azure.mgmt.databox.models.ScheduleAvailabilityRequest + :type schedule_availability_request: + ~data_box_management_client.models.ScheduleAvailabilityRequest :param transport_availability_request: Request body to get the transport availability for given sku. - :type transport_availability_request: ~azure.mgmt.databox.models.TransportAvailabilityRequest + :type transport_availability_request: + ~data_box_management_client.models.TransportAvailabilityRequest """ _attribute_map = { @@ -2551,10 +3013,10 @@ class RegionConfigurationResponse(msrest.serialization.Model): :ivar schedule_availability_response: Schedule availability for given sku in a region. :vartype schedule_availability_response: - ~azure.mgmt.databox.models.ScheduleAvailabilityResponse + ~data_box_management_client.models.ScheduleAvailabilityResponse :ivar transport_availability_response: Transport options available for given sku in a region. :vartype transport_availability_response: - ~azure.mgmt.databox.models.TransportAvailabilityResponse + ~data_box_management_client.models.TransportAvailabilityResponse """ _validation = { @@ -2576,8 +3038,47 @@ def __init__( self.transport_availability_response = None +class ResourceIdentity(msrest.serialization.Model): + """Msi identity details of the resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param type: Identity type. + :type type: str + :ivar principal_id: Service Principal Id backing the Msi. + :vartype principal_id: str + :ivar tenant_id: Home Tenant Id. 
+ :vartype tenant_id: str + :param user_assigned_identities: User Assigned Identities. + :type user_assigned_identities: dict[str, + ~data_box_management_client.models.UserAssignedIdentity] + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'}, + } + + def __init__( + self, + **kwargs + ): + super(ResourceIdentity, self).__init__(**kwargs) + self.type = kwargs.get('type', "None") + self.principal_id = None + self.tenant_id = None + self.user_assigned_identities = kwargs.get('user_assigned_identities', None) + + class ScheduleAvailabilityResponse(msrest.serialization.Model): - """Schedule availability response for given sku in a region. + """Schedule availability for given sku in a region. Variables are only populated by the server, and will be ignored when sending a request. @@ -2610,13 +3111,14 @@ class ShareCredentialDetails(msrest.serialization.Model): :vartype share_name: str :ivar share_type: Type of the share. Possible values include: "UnknownType", "HCS", "BlockBlob", "PageBlob", "AzureFile", "ManagedDisk". - :vartype share_type: str or ~azure.mgmt.databox.models.ShareDestinationFormatType + :vartype share_type: str or ~data_box_management_client.models.ShareDestinationFormatType :ivar user_name: User name for the share. :vartype user_name: str :ivar password: Password for the share. :vartype password: str :ivar supported_access_protocols: Access protocols supported on the device. - :vartype supported_access_protocols: list[str or ~azure.mgmt.databox.models.AccessProtocol] + :vartype supported_access_protocols: list[str or + ~data_box_management_client.models.AccessProtocol] """ _validation = { @@ -2732,21 +3234,20 @@ class ShippingAddress(msrest.serialization.Model): :type state_or_province: str :param country: Required. Name of the Country. :type country: str - :param postal_code: Required. Postal code. + :param postal_code: Postal code. :type postal_code: str :param zip_extended_code: Extended Zip Code. :type zip_extended_code: str :param company_name: Name of the company. :type company_name: str :param address_type: Type of address. Possible values include: "None", "Residential", - "Commercial". - :type address_type: str or ~azure.mgmt.databox.models.AddressType + "Commercial". Default value: "None". + :type address_type: str or ~data_box_management_client.models.AddressType """ _validation = { 'street_address1': {'required': True}, 'country': {'required': True}, - 'postal_code': {'required': True}, } _attribute_map = { @@ -2773,10 +3274,10 @@ def __init__( self.city = kwargs.get('city', None) self.state_or_province = kwargs.get('state_or_province', None) self.country = kwargs['country'] - self.postal_code = kwargs['postal_code'] + self.postal_code = kwargs.get('postal_code', None) self.zip_extended_code = kwargs.get('zip_extended_code', None) self.company_name = kwargs.get('company_name', None) - self.address_type = kwargs.get('address_type', None) + self.address_type = kwargs.get('address_type', "None") class Sku(msrest.serialization.Model): @@ -2786,7 +3287,7 @@ class Sku(msrest.serialization.Model): :param name: Required. The sku name. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". 
- :type name: str or ~azure.mgmt.databox.models.SkuName + :type name: str or ~data_box_management_client.models.SkuName :param display_name: The display name of the sku. :type display_name: str :param family: The sku family. @@ -2816,20 +3317,19 @@ def __init__( class SkuAvailabilityValidationRequest(ValidationInputRequest): """Request to validate sku availability. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :param device_type: Required. Device type to be used for the job. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type device_type: str or ~azure.mgmt.databox.models.SkuName - :ivar transfer_type: Required. Type of the transfer. Default value: "ImportToAzure". - :vartype transfer_type: str + :type device_type: str or ~data_box_management_client.models.SkuName + :param transfer_type: Required. Type of the transfer. Possible values include: "ImportToAzure", + "ExportFromAzure". + :type transfer_type: str or ~data_box_management_client.models.TransferType :param country: Required. ISO country code. Country for hardware shipment. For codes check: https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements. :type country: str @@ -2841,7 +3341,7 @@ class SkuAvailabilityValidationRequest(ValidationInputRequest): _validation = { 'validation_type': {'required': True}, 'device_type': {'required': True}, - 'transfer_type': {'required': True, 'constant': True}, + 'transfer_type': {'required': True}, 'country': {'required': True}, 'location': {'required': True}, } @@ -2854,8 +3354,6 @@ class SkuAvailabilityValidationRequest(ValidationInputRequest): 'location': {'key': 'location', 'type': 'str'}, } - transfer_type = "ImportToAzure" - def __init__( self, **kwargs @@ -2863,6 +3361,7 @@ def __init__( super(SkuAvailabilityValidationRequest, self).__init__(**kwargs) self.validation_type = 'ValidateSkuAvailability' # type: str self.device_type = kwargs['device_type'] + self.transfer_type = kwargs['transfer_type'] self.country = kwargs['country'] self.location = kwargs['location'] @@ -2875,15 +3374,15 @@ class SkuAvailabilityValidationResponseProperties(ValidationInputResponse): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". 
+ :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar status: Sku availability validation status. Possible values include: "Valid", "Invalid", "Skipped". - :vartype status: str or ~azure.mgmt.databox.models.ValidationStatus + :vartype status: str or ~data_box_management_client.models.ValidationStatus """ _validation = { @@ -2894,7 +3393,7 @@ class SkuAvailabilityValidationResponseProperties(ValidationInputResponse): _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'status': {'key': 'status', 'type': 'str'}, } @@ -2946,16 +3445,22 @@ class SkuCost(msrest.serialization.Model): :vartype meter_id: str :ivar meter_type: The type of the meter. :vartype meter_type: str + :ivar multiplier: Multiplier specifies the region specific value to be multiplied with 1$ guid. + Eg: Our new regions will be using 1$ shipping guid with appropriate multiplier specific to + region. + :vartype multiplier: float """ _validation = { 'meter_id': {'readonly': True}, 'meter_type': {'readonly': True}, + 'multiplier': {'readonly': True}, } _attribute_map = { 'meter_id': {'key': 'meterId', 'type': 'str'}, 'meter_type': {'key': 'meterType', 'type': 'str'}, + 'multiplier': {'key': 'multiplier', 'type': 'float'}, } def __init__( @@ -2965,6 +3470,7 @@ def __init__( super(SkuCost, self).__init__(**kwargs) self.meter_id = None self.meter_type = None + self.multiplier = None class SkuInformation(msrest.serialization.Model): @@ -2973,21 +3479,21 @@ class SkuInformation(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. :ivar sku: The Sku. - :vartype sku: ~azure.mgmt.databox.models.Sku + :vartype sku: ~data_box_management_client.models.Sku :ivar enabled: The sku is enabled or not. :vartype enabled: bool - :ivar destination_to_service_location_map: The map of destination location to service location. - :vartype destination_to_service_location_map: - list[~azure.mgmt.databox.models.DestinationToServiceLocationMap] + :ivar data_location_to_service_location_map: The map of data location to service location. + :vartype data_location_to_service_location_map: + list[~data_box_management_client.models.DataLocationToServiceLocationMap] :ivar capacity: Capacity of the Sku. - :vartype capacity: ~azure.mgmt.databox.models.SkuCapacity + :vartype capacity: ~data_box_management_client.models.SkuCapacity :ivar costs: Cost of the Sku. - :vartype costs: list[~azure.mgmt.databox.models.SkuCost] + :vartype costs: list[~data_box_management_client.models.SkuCost] :ivar api_versions: Api versions that support this Sku. :vartype api_versions: list[str] :ivar disabled_reason: Reason why the Sku is disabled. Possible values include: "None", "Country", "Region", "Feature", "OfferType", "NoSubscriptionInfo". - :vartype disabled_reason: str or ~azure.mgmt.databox.models.SkuDisabledReason + :vartype disabled_reason: str or ~data_box_management_client.models.SkuDisabledReason :ivar disabled_reason_message: Message for why the Sku is disabled. :vartype disabled_reason_message: str :ivar required_feature: Required feature to access the sku. 
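
The regenerated models above turn transfer_type into an explicit required field on SkuAvailabilityValidationRequest instead of a constant pinned to "ImportToAzure". A minimal usage sketch, assuming the vendored models are importable as azext_databox.vendored_sdks.databox.models and that the class is exported from that package root:

from azext_databox.vendored_sdks.databox.models import SkuAvailabilityValidationRequest

# Illustrative only: the transfer direction ("ImportToAzure" or
# "ExportFromAzure") must now be stated explicitly rather than being an
# implicit import-only constant. validation_type is set by the constructor.
request = SkuAvailabilityValidationRequest(
    device_type="DataBox",          # SkuName
    transfer_type="ImportToAzure",  # TransferType (newly required field)
    country="US",                   # ISO 3166-1 alpha-2 country code
    location="westus",              # Azure region to check availability for
)
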
@@ -2997,7 +3503,7 @@ class SkuInformation(msrest.serialization.Model): _validation = { 'sku': {'readonly': True}, 'enabled': {'readonly': True}, - 'destination_to_service_location_map': {'readonly': True}, + 'data_location_to_service_location_map': {'readonly': True}, 'capacity': {'readonly': True}, 'costs': {'readonly': True}, 'api_versions': {'readonly': True}, @@ -3009,7 +3515,7 @@ class SkuInformation(msrest.serialization.Model): _attribute_map = { 'sku': {'key': 'sku', 'type': 'Sku'}, 'enabled': {'key': 'enabled', 'type': 'bool'}, - 'destination_to_service_location_map': {'key': 'properties.destinationToServiceLocationMap', 'type': '[DestinationToServiceLocationMap]'}, + 'data_location_to_service_location_map': {'key': 'properties.dataLocationToServiceLocationMap', 'type': '[DataLocationToServiceLocationMap]'}, 'capacity': {'key': 'properties.capacity', 'type': 'SkuCapacity'}, 'costs': {'key': 'properties.costs', 'type': '[SkuCost]'}, 'api_versions': {'key': 'properties.apiVersions', 'type': '[str]'}, @@ -3025,7 +3531,7 @@ def __init__( super(SkuInformation, self).__init__(**kwargs) self.sku = None self.enabled = None - self.destination_to_service_location_map = None + self.data_location_to_service_location_map = None self.capacity = None self.costs = None self.api_versions = None @@ -3034,16 +3540,55 @@ def __init__( self.required_feature = None +class StorageAccountDetails(DataAccountDetails): + """Details for the storage account. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Account Type of the data to be transferred.Constant filled + by server. Possible values include: "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param share_password: Password for all the shares to be created on the device. Should not be + passed for TransferType:ExportFromAzure jobs. If this is not passed, the service will generate + password itself. This will not be returned in Get Call. Password Requirements : Password must + be minimum of 12 and maximum of 64 characters. Password must have at least one uppercase + alphabet, one number and one special character. Password cannot have the following characters : + IilLoO0 Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. + :type share_password: str + :param storage_account_id: Required. Storage Account Resource Id. + :type storage_account_id: str + """ + + _validation = { + 'data_account_type': {'required': True}, + 'storage_account_id': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'share_password': {'key': 'sharePassword', 'type': 'str'}, + 'storage_account_id': {'key': 'storageAccountId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageAccountDetails, self).__init__(**kwargs) + self.data_account_type = 'StorageAccount' # type: str + self.storage_account_id = kwargs['storage_account_id'] + + class SubscriptionIsAllowedToCreateJobValidationRequest(ValidationInputRequest): """Request to validate subscription permission to create jobs. All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. 
Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator """ _validation = { @@ -3070,15 +3615,15 @@ class SubscriptionIsAllowedToCreateJobValidationResponseProperties(ValidationInp All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar status: Validation status of subscription permission to create job. Possible values include: "Valid", "Invalid", "Skipped". - :vartype status: str or ~azure.mgmt.databox.models.ValidationStatus + :vartype status: str or ~data_box_management_client.models.ValidationStatus """ _validation = { @@ -3089,7 +3634,7 @@ class SubscriptionIsAllowedToCreateJobValidationResponseProperties(ValidationInp _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'status': {'key': 'status', 'type': 'str'}, } @@ -3102,6 +3647,207 @@ def __init__( self.status = None +class SystemData(msrest.serialization.Model): + """Provides details about resource creation and update time. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar created_by: A string identifier for the identity that created the resource. + :vartype created_by: str + :ivar created_by_type: The type of identity that created the resource: user, application, + managedIdentity. + :vartype created_by_type: str + :ivar created_at: The timestamp of resource creation (UTC). + :vartype created_at: ~datetime.datetime + :ivar last_modified_by: A string identifier for the identity that last modified the resource. + :vartype last_modified_by: str + :ivar last_modified_by_type: The type of identity that last modified the resource: user, + application, managedIdentity. + :vartype last_modified_by_type: str + :ivar last_modified_at: The timestamp of resource last modification (UTC). 
+ :vartype last_modified_at: ~datetime.datetime + """ + + _validation = { + 'created_by': {'readonly': True}, + 'created_by_type': {'readonly': True}, + 'created_at': {'readonly': True}, + 'last_modified_by': {'readonly': True}, + 'last_modified_by_type': {'readonly': True}, + 'last_modified_at': {'readonly': True}, + } + + _attribute_map = { + 'created_by': {'key': 'createdBy', 'type': 'str'}, + 'created_by_type': {'key': 'createdByType', 'type': 'str'}, + 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, + 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, + 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, + 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, + } + + def __init__( + self, + **kwargs + ): + super(SystemData, self).__init__(**kwargs) + self.created_by = None + self.created_by_type = None + self.created_at = None + self.last_modified_by = None + self.last_modified_by_type = None + self.last_modified_at = None + + +class TransferAllDetails(msrest.serialization.Model): + """Details to transfer all data. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Type of the account of data. Possible values include: + "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param transfer_all_blobs: To indicate if all Azure blobs have to be transferred. + :type transfer_all_blobs: bool + :param transfer_all_files: To indicate if all Azure Files have to be transferred. + :type transfer_all_files: bool + """ + + _validation = { + 'data_account_type': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'transfer_all_blobs': {'key': 'transferAllBlobs', 'type': 'bool'}, + 'transfer_all_files': {'key': 'transferAllFiles', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(TransferAllDetails, self).__init__(**kwargs) + self.data_account_type = kwargs['data_account_type'] + self.transfer_all_blobs = kwargs.get('transfer_all_blobs', None) + self.transfer_all_files = kwargs.get('transfer_all_files', None) + + +class TransferConfiguration(msrest.serialization.Model): + """Configuration for defining the transfer of data. + + All required parameters must be populated in order to send to Azure. + + :param transfer_configuration_type: Required. Type of the configuration for transfer. Possible + values include: "TransferAll", "TransferUsingFilter". + :type transfer_configuration_type: str or + ~data_box_management_client.models.TransferConfigurationType + :param transfer_filter_details: Map of filter type and the details to filter. This field is + required only if the TransferConfigurationType is given as TransferUsingFilter. + :type transfer_filter_details: + ~data_box_management_client.models.TransferConfigurationTransferFilterDetails + :param transfer_all_details: Map of filter type and the details to transfer all data. This + field is required only if the TransferConfigurationType is given as TransferAll. 
+ :type transfer_all_details: + ~data_box_management_client.models.TransferConfigurationTransferAllDetails + """ + + _validation = { + 'transfer_configuration_type': {'required': True}, + } + + _attribute_map = { + 'transfer_configuration_type': {'key': 'transferConfigurationType', 'type': 'str'}, + 'transfer_filter_details': {'key': 'transferFilterDetails', 'type': 'TransferConfigurationTransferFilterDetails'}, + 'transfer_all_details': {'key': 'transferAllDetails', 'type': 'TransferConfigurationTransferAllDetails'}, + } + + def __init__( + self, + **kwargs + ): + super(TransferConfiguration, self).__init__(**kwargs) + self.transfer_configuration_type = kwargs['transfer_configuration_type'] + self.transfer_filter_details = kwargs.get('transfer_filter_details', None) + self.transfer_all_details = kwargs.get('transfer_all_details', None) + + +class TransferConfigurationTransferAllDetails(msrest.serialization.Model): + """Map of filter type and the details to transfer all data. This field is required only if the TransferConfigurationType is given as TransferAll. + + :param include: Details to transfer all data. + :type include: ~data_box_management_client.models.TransferAllDetails + """ + + _attribute_map = { + 'include': {'key': 'include', 'type': 'TransferAllDetails'}, + } + + def __init__( + self, + **kwargs + ): + super(TransferConfigurationTransferAllDetails, self).__init__(**kwargs) + self.include = kwargs.get('include', None) + + +class TransferConfigurationTransferFilterDetails(msrest.serialization.Model): + """Map of filter type and the details to filter. This field is required only if the TransferConfigurationType is given as TransferUsingFilter. + + :param include: Details of the filtering the transfer of data. + :type include: ~data_box_management_client.models.TransferFilterDetails + """ + + _attribute_map = { + 'include': {'key': 'include', 'type': 'TransferFilterDetails'}, + } + + def __init__( + self, + **kwargs + ): + super(TransferConfigurationTransferFilterDetails, self).__init__(**kwargs) + self.include = kwargs.get('include', None) + + +class TransferFilterDetails(msrest.serialization.Model): + """Details of the filtering the transfer of data. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Type of the account of data. Possible values include: + "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param blob_filter_details: Filter details to transfer blobs. + :type blob_filter_details: ~data_box_management_client.models.BlobFilterDetails + :param azure_file_filter_details: Filter details to transfer Azure files. + :type azure_file_filter_details: ~data_box_management_client.models.AzureFileFilterDetails + :param filter_file_details: Details of the filter files to be used for data transfer. 
+ :type filter_file_details: list[~data_box_management_client.models.FilterFileDetails] + """ + + _validation = { + 'data_account_type': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'blob_filter_details': {'key': 'blobFilterDetails', 'type': 'BlobFilterDetails'}, + 'azure_file_filter_details': {'key': 'azureFileFilterDetails', 'type': 'AzureFileFilterDetails'}, + 'filter_file_details': {'key': 'filterFileDetails', 'type': '[FilterFileDetails]'}, + } + + def __init__( + self, + **kwargs + ): + super(TransferFilterDetails, self).__init__(**kwargs) + self.data_account_type = kwargs['data_account_type'] + self.blob_filter_details = kwargs.get('blob_filter_details', None) + self.azure_file_filter_details = kwargs.get('azure_file_filter_details', None) + self.filter_file_details = kwargs.get('filter_file_details', None) + + class TransportAvailabilityDetails(msrest.serialization.Model): """Transport options availability details for given region. @@ -3109,7 +3855,7 @@ class TransportAvailabilityDetails(msrest.serialization.Model): :ivar shipment_type: Transport Shipment Type supported for given region. Possible values include: "CustomerManaged", "MicrosoftManaged". - :vartype shipment_type: str or ~azure.mgmt.databox.models.TransportShipmentTypes + :vartype shipment_type: str or ~data_box_management_client.models.TransportShipmentTypes """ _validation = { @@ -3133,7 +3879,7 @@ class TransportAvailabilityRequest(msrest.serialization.Model): :param sku_name: Type of the device. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type sku_name: str or ~azure.mgmt.databox.models.SkuName + :type sku_name: str or ~data_box_management_client.models.SkuName """ _attribute_map = { @@ -3155,7 +3901,7 @@ class TransportAvailabilityResponse(msrest.serialization.Model): :ivar transport_availability_details: List of transport availability details for given region. :vartype transport_availability_details: - list[~azure.mgmt.databox.models.TransportAvailabilityDetails] + list[~data_box_management_client.models.TransportAvailabilityDetails] """ _validation = { @@ -3181,7 +3927,7 @@ class TransportPreferences(msrest.serialization.Model): :param preferred_shipment_type: Required. Indicates Shipment Logistics type that the customer preferred. Possible values include: "CustomerManaged", "MicrosoftManaged". - :type preferred_shipment_type: str or ~azure.mgmt.databox.models.TransportShipmentTypes + :type preferred_shipment_type: str or ~data_box_management_client.models.TransportShipmentTypes """ _validation = { @@ -3208,7 +3954,7 @@ class UnencryptedCredentials(msrest.serialization.Model): :ivar job_name: Name of the job. :vartype job_name: str :ivar job_secrets: Secrets related to this job. - :vartype job_secrets: ~azure.mgmt.databox.models.JobSecrets + :vartype job_secrets: ~data_box_management_client.models.JobSecrets """ _validation = { @@ -3234,7 +3980,7 @@ class UnencryptedCredentialsList(msrest.serialization.Model): """List of unencrypted credentials for accessing device. :param value: List of unencrypted credentials. - :type value: list[~azure.mgmt.databox.models.UnencryptedCredentials] + :type value: list[~data_box_management_client.models.UnencryptedCredentials] :param next_link: Link for the next set of unencrypted credentials. :type next_link: str """ @@ -3257,14 +4003,17 @@ class UpdateJobDetails(msrest.serialization.Model): """Job details for update. 
:param contact_details: Contact details for notification and shipping. - :type contact_details: ~azure.mgmt.databox.models.ContactDetails + :type contact_details: ~data_box_management_client.models.ContactDetails :param shipping_address: Shipping address of the customer. - :type shipping_address: ~azure.mgmt.databox.models.ShippingAddress + :type shipping_address: ~data_box_management_client.models.ShippingAddress + :param key_encryption_key: Key encryption key for the job. + :type key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey """ _attribute_map = { 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, } def __init__( @@ -3274,6 +4023,56 @@ def __init__( super(UpdateJobDetails, self).__init__(**kwargs) self.contact_details = kwargs.get('contact_details', None) self.shipping_address = kwargs.get('shipping_address', None) + self.key_encryption_key = kwargs.get('key_encryption_key', None) + + +class UserAssignedIdentity(msrest.serialization.Model): + """Class defining User assigned identity details. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar principal_id: The principal id of user assigned identity. + :vartype principal_id: str + :ivar client_id: The client id of user assigned identity. + :vartype client_id: str + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'client_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(UserAssignedIdentity, self).__init__(**kwargs) + self.principal_id = None + self.client_id = None + + +class UserAssignedProperties(msrest.serialization.Model): + """User assigned identity properties. + + :param resource_id: Arm resource id for user assigned identity to be used to fetch MSI token. + :type resource_id: str + """ + + _attribute_map = { + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(UserAssignedProperties, self).__init__(**kwargs) + self.resource_id = kwargs.get('resource_id', None) class ValidateAddress(ValidationInputRequest): @@ -3282,17 +4081,17 @@ class ValidateAddress(ValidationInputRequest): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :param shipping_address: Required. Shipping address of the customer. - :type shipping_address: ~azure.mgmt.databox.models.ShippingAddress + :type shipping_address: ~data_box_management_client.models.ShippingAddress :param device_type: Required. Device type to be used for the job. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". 
- :type device_type: str or ~azure.mgmt.databox.models.SkuName + :type device_type: str or ~data_box_management_client.models.SkuName :param transport_preferences: Preferences related to the shipment logistics of the sku. - :type transport_preferences: ~azure.mgmt.databox.models.TransportPreferences + :type transport_preferences: ~data_box_management_client.models.TransportPreferences """ _validation = { @@ -3326,10 +4125,11 @@ class ValidationResponse(msrest.serialization.Model): :ivar status: Overall validation status. Possible values include: "AllValidToProceed", "InputsRevisitRequired", "CertainInputValidationsSkipped". - :vartype status: str or ~azure.mgmt.databox.models.OverallValidationStatus + :vartype status: str or ~data_box_management_client.models.OverallValidationStatus :ivar individual_response_details: List of response details contain validationType and its response as key and value respectively. - :vartype individual_response_details: list[~azure.mgmt.databox.models.ValidationInputResponse] + :vartype individual_response_details: + list[~data_box_management_client.models.ValidationInputResponse] """ _validation = { diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/models/_models_py3.py b/src/databox/azext_databox/vendored_sdks/databox/models/_models_py3.py similarity index 60% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/models/_models_py3.py rename to src/databox/azext_databox/vendored_sdks/databox/models/_models_py3.py index 89c41698459..2c2bc23d5e3 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/models/_models_py3.py +++ b/src/databox/azext_databox/vendored_sdks/databox/models/_models_py3.py @@ -9,6 +9,7 @@ import datetime from typing import Dict, List, Optional, Union +from azure.core.exceptions import HttpResponseError import msrest.serialization from ._data_box_management_client_enums import * @@ -21,26 +22,27 @@ class AccountCredentialDetails(msrest.serialization.Model): :ivar account_name: Name of the account. :vartype account_name: str - :ivar data_destination_type: Data Destination Type. Possible values include: "StorageAccount", + :ivar data_account_type: Type of the account. Possible values include: "StorageAccount", "ManagedDisk". - :vartype data_destination_type: str or ~azure.mgmt.databox.models.DataDestinationType + :vartype data_account_type: str or ~data_box_management_client.models.DataAccountType :ivar account_connection_string: Connection string of the account endpoint to use the account as a storage endpoint on the device. :vartype account_connection_string: str :ivar share_credential_details: Per share level unencrypted access credentials. 
- :vartype share_credential_details: list[~azure.mgmt.databox.models.ShareCredentialDetails] + :vartype share_credential_details: + list[~data_box_management_client.models.ShareCredentialDetails] """ _validation = { 'account_name': {'readonly': True}, - 'data_destination_type': {'readonly': True}, + 'data_account_type': {'readonly': True}, 'account_connection_string': {'readonly': True}, 'share_credential_details': {'readonly': True}, } _attribute_map = { 'account_name': {'key': 'accountName', 'type': 'str'}, - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, 'account_connection_string': {'key': 'accountConnectionString', 'type': 'str'}, 'share_credential_details': {'key': 'shareCredentialDetails', 'type': '[ShareCredentialDetails]'}, } @@ -51,28 +53,54 @@ def __init__( ): super(AccountCredentialDetails, self).__init__(**kwargs) self.account_name = None - self.data_destination_type = None + self.data_account_type = None self.account_connection_string = None self.share_credential_details = None +class AdditionalErrorInfo(msrest.serialization.Model): + """Additional error info. + + :param type: Additional error type. + :type type: str + :param info: Additional error info. + :type info: object + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'info': {'key': 'info', 'type': 'object'}, + } + + def __init__( + self, + *, + type: Optional[str] = None, + info: Optional[object] = None, + **kwargs + ): + super(AdditionalErrorInfo, self).__init__(**kwargs) + self.type = type + self.info = info + + class AddressValidationOutput(msrest.serialization.Model): """Output of the address validation api. Variables are only populated by the server, and will be ignored when sending a request. :param validation_type: Identifies the type of validation response.Constant filled by server. - Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", + "ValidatePreferences", "ValidateCreateOrderLimit", "ValidateSkuAvailability", + "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar validation_status: The address validation status. Possible values include: "Valid", "Invalid", "Ambiguous". - :vartype validation_status: str or ~azure.mgmt.databox.models.AddressValidationStatus + :vartype validation_status: str or ~data_box_management_client.models.AddressValidationStatus :ivar alternate_addresses: List of alternate addresses. 
- :vartype alternate_addresses: list[~azure.mgmt.databox.models.ShippingAddress] + :vartype alternate_addresses: list[~data_box_management_client.models.ShippingAddress] """ _validation = { @@ -83,7 +111,7 @@ class AddressValidationOutput(msrest.serialization.Model): _attribute_map = { 'validation_type': {'key': 'properties.validationType', 'type': 'str'}, - 'error': {'key': 'properties.error', 'type': 'Error'}, + 'error': {'key': 'properties.error', 'type': 'CloudError'}, 'validation_status': {'key': 'properties.validationStatus', 'type': 'str'}, 'alternate_addresses': {'key': 'properties.alternateAddresses', 'type': '[ShippingAddress]'}, } @@ -103,19 +131,19 @@ class ValidationInputResponse(msrest.serialization.Model): """Minimum properties that should be present in each individual validation response. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AddressValidationProperties, CreateOrderLimitForSubscriptionValidationResponseProperties, DataDestinationDetailsValidationResponseProperties, PreferencesValidationResponseProperties, SkuAvailabilityValidationResponseProperties, SubscriptionIsAllowedToCreateJobValidationResponseProperties. + sub-classes are: AddressValidationProperties, CreateOrderLimitForSubscriptionValidationResponseProperties, DataTransferDetailsValidationResponseProperties, PreferencesValidationResponseProperties, SkuAvailabilityValidationResponseProperties, SubscriptionIsAllowedToCreateJobValidationResponseProperties. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. 
- :vartype error: ~azure.mgmt.databox.models.Error + :vartype error: ~data_box_management_client.models.CloudError """ _validation = { @@ -125,11 +153,11 @@ class ValidationInputResponse(msrest.serialization.Model): _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, } _subtype_map = { - 'validation_type': {'ValidateAddress': 'AddressValidationProperties', 'ValidateCreateOrderLimit': 'CreateOrderLimitForSubscriptionValidationResponseProperties', 'ValidateDataDestinationDetails': 'DataDestinationDetailsValidationResponseProperties', 'ValidatePreferences': 'PreferencesValidationResponseProperties', 'ValidateSkuAvailability': 'SkuAvailabilityValidationResponseProperties', 'ValidateSubscriptionIsAllowedToCreateJob': 'SubscriptionIsAllowedToCreateJobValidationResponseProperties'} + 'validation_type': {'ValidateAddress': 'AddressValidationProperties', 'ValidateCreateOrderLimit': 'CreateOrderLimitForSubscriptionValidationResponseProperties', 'ValidateDataTransferDetails': 'DataTransferDetailsValidationResponseProperties', 'ValidatePreferences': 'PreferencesValidationResponseProperties', 'ValidateSkuAvailability': 'SkuAvailabilityValidationResponseProperties', 'ValidateSubscriptionIsAllowedToCreateJob': 'SubscriptionIsAllowedToCreateJobValidationResponseProperties'} } def __init__( @@ -149,17 +177,17 @@ class AddressValidationProperties(ValidationInputResponse): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar validation_status: The address validation status. Possible values include: "Valid", "Invalid", "Ambiguous". - :vartype validation_status: str or ~azure.mgmt.databox.models.AddressValidationStatus + :vartype validation_status: str or ~data_box_management_client.models.AddressValidationStatus :ivar alternate_addresses: List of alternate addresses. - :vartype alternate_addresses: list[~azure.mgmt.databox.models.ShippingAddress] + :vartype alternate_addresses: list[~data_box_management_client.models.ShippingAddress] """ _validation = { @@ -171,7 +199,7 @@ class AddressValidationProperties(ValidationInputResponse): _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'validation_status': {'key': 'validationStatus', 'type': 'str'}, 'alternate_addresses': {'key': 'alternateAddresses', 'type': '[ShippingAddress]'}, } @@ -186,6 +214,33 @@ def __init__( self.alternate_addresses = None +class ApiError(msrest.serialization.Model): + """ApiError. + + All required parameters must be populated in order to send to Azure. + + :param error: Required. 
+ :type error: ~data_box_management_client.models.ErrorDetail + """ + + _validation = { + 'error': {'required': True}, + } + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorDetail'}, + } + + def __init__( + self, + *, + error: "ErrorDetail", + **kwargs + ): + super(ApiError, self).__init__(**kwargs) + self.error = error + + class ApplianceNetworkConfiguration(msrest.serialization.Model): """The Network Adapter configuration of a DataBox. @@ -254,12 +309,11 @@ def __init__( class AvailableSkuRequest(msrest.serialization.Model): """The filters for showing the available skus. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar transfer_type: Required. Type of the transfer. Default value: "ImportToAzure". - :vartype transfer_type: str + :param transfer_type: Required. Type of the transfer. Possible values include: "ImportToAzure", + "ExportFromAzure". + :type transfer_type: str or ~data_box_management_client.models.TransferType :param country: Required. ISO country code. Country for hardware shipment. For codes check: https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements. :type country: str @@ -267,11 +321,11 @@ class AvailableSkuRequest(msrest.serialization.Model): https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. :type location: str :param sku_names: Sku Names to filter for available skus. - :type sku_names: list[str or ~azure.mgmt.databox.models.SkuName] + :type sku_names: list[str or ~data_box_management_client.models.SkuName] """ _validation = { - 'transfer_type': {'required': True, 'constant': True}, + 'transfer_type': {'required': True}, 'country': {'required': True}, 'location': {'required': True}, } @@ -283,17 +337,17 @@ class AvailableSkuRequest(msrest.serialization.Model): 'sku_names': {'key': 'skuNames', 'type': '[str]'}, } - transfer_type = "ImportToAzure" - def __init__( self, *, + transfer_type: Union[str, "TransferType"], country: str, location: str, sku_names: Optional[List[Union[str, "SkuName"]]] = None, **kwargs ): super(AvailableSkuRequest, self).__init__(**kwargs) + self.transfer_type = transfer_type self.country = country self.location = location self.sku_names = sku_names @@ -305,7 +359,7 @@ class AvailableSkusResult(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. :ivar value: List of available skus. - :vartype value: list[~azure.mgmt.databox.models.SkuInformation] + :vartype value: list[~data_box_management_client.models.SkuInformation] :param next_link: Link for the next set of skus. :type next_link: str """ @@ -330,6 +384,68 @@ def __init__( self.next_link = next_link +class AzureFileFilterDetails(msrest.serialization.Model): + """Filter details to transfer Azure files. + + :param file_prefix_list: Prefix list of the Azure files to be transferred. + :type file_prefix_list: list[str] + :param file_path_list: List of full path of the files to be transferred. + :type file_path_list: list[str] + :param file_share_list: List of file shares to be transferred. 
+ :type file_share_list: list[str] + """ + + _attribute_map = { + 'file_prefix_list': {'key': 'filePrefixList', 'type': '[str]'}, + 'file_path_list': {'key': 'filePathList', 'type': '[str]'}, + 'file_share_list': {'key': 'fileShareList', 'type': '[str]'}, + } + + def __init__( + self, + *, + file_prefix_list: Optional[List[str]] = None, + file_path_list: Optional[List[str]] = None, + file_share_list: Optional[List[str]] = None, + **kwargs + ): + super(AzureFileFilterDetails, self).__init__(**kwargs) + self.file_prefix_list = file_prefix_list + self.file_path_list = file_path_list + self.file_share_list = file_share_list + + +class BlobFilterDetails(msrest.serialization.Model): + """Filter details to transfer Azure Blobs. + + :param blob_prefix_list: Prefix list of the Azure blobs to be transferred. + :type blob_prefix_list: list[str] + :param blob_path_list: List of full path of the blobs to be transferred. + :type blob_path_list: list[str] + :param container_list: List of blob containers to be transferred. + :type container_list: list[str] + """ + + _attribute_map = { + 'blob_prefix_list': {'key': 'blobPrefixList', 'type': '[str]'}, + 'blob_path_list': {'key': 'blobPathList', 'type': '[str]'}, + 'container_list': {'key': 'containerList', 'type': '[str]'}, + } + + def __init__( + self, + *, + blob_prefix_list: Optional[List[str]] = None, + blob_path_list: Optional[List[str]] = None, + container_list: Optional[List[str]] = None, + **kwargs + ): + super(BlobFilterDetails, self).__init__(**kwargs) + self.blob_prefix_list = blob_prefix_list + self.blob_path_list = blob_path_list + self.container_list = container_list + + class CancellationReason(msrest.serialization.Model): """Reason for cancellation. @@ -358,23 +474,25 @@ def __init__( class CloudError(msrest.serialization.Model): - """The error information object. + """Cloud error. Variables are only populated by the server, and will be ignored when sending a request. - :ivar code: Error code string. - :vartype code: str - :ivar message: Descriptive error information. - :vartype message: str - :param target: Error target. + :param code: Cloud error code. + :type code: str + :param message: Cloud error message. + :type message: str + :param target: Cloud error target. :type target: str - :param details: More detailed error information. - :type details: list[~azure.mgmt.databox.models.CloudError] + :ivar details: Cloud error details. + :vartype details: list[~data_box_management_client.models.CloudError] + :ivar additional_info: Cloud error additional info. 
+ :vartype additional_info: list[~data_box_management_client.models.AdditionalErrorInfo] """ _validation = { - 'code': {'readonly': True}, - 'message': {'readonly': True}, + 'details': {'readonly': True}, + 'additional_info': {'readonly': True}, } _attribute_map = { @@ -382,20 +500,23 @@ class CloudError(msrest.serialization.Model): 'message': {'key': 'message', 'type': 'str'}, 'target': {'key': 'target', 'type': 'str'}, 'details': {'key': 'details', 'type': '[CloudError]'}, + 'additional_info': {'key': 'additionalInfo', 'type': '[AdditionalErrorInfo]'}, } def __init__( self, *, + code: Optional[str] = None, + message: Optional[str] = None, target: Optional[str] = None, - details: Optional[List["CloudError"]] = None, **kwargs ): super(CloudError, self).__init__(**kwargs) - self.code = None - self.message = None + self.code = code + self.message = message self.target = target - self.details = details + self.details = None + self.additional_info = None class ContactDetails(msrest.serialization.Model): @@ -414,7 +535,7 @@ class ContactDetails(msrest.serialization.Model): :param email_list: Required. List of Email-ids to be notified about job progress. :type email_list: list[str] :param notification_preference: Notification preference for a job stage. - :type notification_preference: list[~azure.mgmt.databox.models.NotificationPreference] + :type notification_preference: list[~data_box_management_client.models.NotificationPreference] """ _validation = { @@ -462,7 +583,7 @@ class CopyLogDetails(msrest.serialization.Model): :param copy_log_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type copy_log_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator + :type copy_log_details_type: str or ~data_box_management_client.models.ClassDiscriminator """ _validation = { @@ -490,20 +611,24 @@ class CopyProgress(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :ivar storage_account_name: Name of the storage account where the data needs to be uploaded. + :ivar storage_account_name: Name of the storage account. This will be empty for data account + types other than storage account. :vartype storage_account_name: str - :ivar data_destination_type: Data Destination Type. Possible values include: "StorageAccount", + :ivar transfer_type: Transfer type of data. Possible values include: "ImportToAzure", + "ExportFromAzure". + :vartype transfer_type: str or ~data_box_management_client.models.TransferType + :ivar data_account_type: Data Account Type. Possible values include: "StorageAccount", "ManagedDisk". - :vartype data_destination_type: str or ~azure.mgmt.databox.models.DataDestinationType + :vartype data_account_type: str or ~data_box_management_client.models.DataAccountType :ivar account_id: Id of the account where the data needs to be uploaded. :vartype account_id: str - :ivar bytes_sent_to_cloud: Amount of data uploaded by the job as of now. - :vartype bytes_sent_to_cloud: long + :ivar bytes_processed: To indicate bytes transferred. + :vartype bytes_processed: long :ivar total_bytes_to_process: Total amount of data to be processed by the job. :vartype total_bytes_to_process: long - :ivar files_processed: Number of files processed by the job as of now. + :ivar files_processed: Number of files processed. :vartype files_processed: long - :ivar total_files_to_process: Total number of files to be processed by the job. 
+ :ivar total_files_to_process: Total files to process. :vartype total_files_to_process: long :ivar invalid_files_processed: Number of files not adhering to azure naming conventions which were processed by automatic renaming. @@ -516,13 +641,21 @@ class CopyProgress(msrest.serialization.Model): :vartype renamed_container_count: long :ivar files_errored_out: Number of files which could not be copied. :vartype files_errored_out: long + :ivar directories_errored_out: To indicate directories errored out in the job. + :vartype directories_errored_out: long + :ivar invalid_directories_processed: To indicate directories renamed. + :vartype invalid_directories_processed: long + :ivar is_enumeration_in_progress: To indicate if enumeration of data is in progress. + Until this is true, the TotalBytesToProcess may not be valid. + :vartype is_enumeration_in_progress: bool """ _validation = { 'storage_account_name': {'readonly': True}, - 'data_destination_type': {'readonly': True}, + 'transfer_type': {'readonly': True}, + 'data_account_type': {'readonly': True}, 'account_id': {'readonly': True}, - 'bytes_sent_to_cloud': {'readonly': True}, + 'bytes_processed': {'readonly': True}, 'total_bytes_to_process': {'readonly': True}, 'files_processed': {'readonly': True}, 'total_files_to_process': {'readonly': True}, @@ -530,13 +663,17 @@ class CopyProgress(msrest.serialization.Model): 'invalid_file_bytes_uploaded': {'readonly': True}, 'renamed_container_count': {'readonly': True}, 'files_errored_out': {'readonly': True}, + 'directories_errored_out': {'readonly': True}, + 'invalid_directories_processed': {'readonly': True}, + 'is_enumeration_in_progress': {'readonly': True}, } _attribute_map = { 'storage_account_name': {'key': 'storageAccountName', 'type': 'str'}, - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, + 'transfer_type': {'key': 'transferType', 'type': 'str'}, + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, 'account_id': {'key': 'accountId', 'type': 'str'}, - 'bytes_sent_to_cloud': {'key': 'bytesSentToCloud', 'type': 'long'}, + 'bytes_processed': {'key': 'bytesProcessed', 'type': 'long'}, 'total_bytes_to_process': {'key': 'totalBytesToProcess', 'type': 'long'}, 'files_processed': {'key': 'filesProcessed', 'type': 'long'}, 'total_files_to_process': {'key': 'totalFilesToProcess', 'type': 'long'}, @@ -544,6 +681,9 @@ class CopyProgress(msrest.serialization.Model): 'invalid_file_bytes_uploaded': {'key': 'invalidFileBytesUploaded', 'type': 'long'}, 'renamed_container_count': {'key': 'renamedContainerCount', 'type': 'long'}, 'files_errored_out': {'key': 'filesErroredOut', 'type': 'long'}, + 'directories_errored_out': {'key': 'directoriesErroredOut', 'type': 'long'}, + 'invalid_directories_processed': {'key': 'invalidDirectoriesProcessed', 'type': 'long'}, + 'is_enumeration_in_progress': {'key': 'isEnumerationInProgress', 'type': 'bool'}, } def __init__( @@ -552,9 +692,10 @@ def __init__( ): super(CopyProgress, self).__init__(**kwargs) self.storage_account_name = None - self.data_destination_type = None + self.transfer_type = None + self.data_account_type = None self.account_id = None - self.bytes_sent_to_cloud = None + self.bytes_processed = None self.total_bytes_to_process = None self.files_processed = None self.total_files_to_process = None @@ -562,32 +703,36 @@ def __init__( self.invalid_file_bytes_uploaded = None self.renamed_container_count = None self.files_errored_out = None + self.directories_errored_out = None + self.invalid_directories_processed = None + 
self.is_enumeration_in_progress = None class ValidationRequest(msrest.serialization.Model): - """Input request for all pre job creation validation. + """Minimum request requirement of any validation category. You probably want to use the sub-classes and not this class directly. Known sub-classes are: CreateJobValidations. All required parameters must be populated in order to send to Azure. - :param individual_request_details: Required. List of request details contain validationType and - its request as key and value respectively. - :type individual_request_details: list[~azure.mgmt.databox.models.ValidationInputRequest] :param validation_category: Required. Identify the nature of validation.Constant filled by server. :type validation_category: str + :param individual_request_details: Required. List of request details contain validationType and + its request as key and value respectively. + :type individual_request_details: + list[~data_box_management_client.models.ValidationInputRequest] """ _validation = { - 'individual_request_details': {'required': True}, 'validation_category': {'required': True}, + 'individual_request_details': {'required': True}, } _attribute_map = { - 'individual_request_details': {'key': 'individualRequestDetails', 'type': '[ValidationInputRequest]'}, 'validation_category': {'key': 'validationCategory', 'type': 'str'}, + 'individual_request_details': {'key': 'individualRequestDetails', 'type': '[ValidationInputRequest]'}, } _subtype_map = { @@ -601,8 +746,8 @@ def __init__( **kwargs ): super(ValidationRequest, self).__init__(**kwargs) - self.individual_request_details = individual_request_details self.validation_category = None # type: Optional[str] + self.individual_request_details = individual_request_details class CreateJobValidations(ValidationRequest): @@ -610,22 +755,23 @@ class CreateJobValidations(ValidationRequest): All required parameters must be populated in order to send to Azure. - :param individual_request_details: Required. List of request details contain validationType and - its request as key and value respectively. - :type individual_request_details: list[~azure.mgmt.databox.models.ValidationInputRequest] :param validation_category: Required. Identify the nature of validation.Constant filled by server. :type validation_category: str + :param individual_request_details: Required. List of request details contain validationType and + its request as key and value respectively. + :type individual_request_details: + list[~data_box_management_client.models.ValidationInputRequest] """ _validation = { - 'individual_request_details': {'required': True}, 'validation_category': {'required': True}, + 'individual_request_details': {'required': True}, } _attribute_map = { - 'individual_request_details': {'key': 'individualRequestDetails', 'type': '[ValidationInputRequest]'}, 'validation_category': {'key': 'validationCategory', 'type': 'str'}, + 'individual_request_details': {'key': 'individualRequestDetails', 'type': '[ValidationInputRequest]'}, } def __init__( @@ -642,15 +788,15 @@ class ValidationInputRequest(msrest.serialization.Model): """Minimum fields that must be present in any type of validation request. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ValidateAddress, CreateOrderLimitForSubscriptionValidationRequest, DataDestinationDetailsValidationRequest, PreferencesValidationRequest, SkuAvailabilityValidationRequest, SubscriptionIsAllowedToCreateJobValidationRequest. 
+ sub-classes are: ValidateAddress, CreateOrderLimitForSubscriptionValidationRequest, DataTransferDetailsValidationRequest, PreferencesValidationRequest, SkuAvailabilityValidationRequest, SubscriptionIsAllowedToCreateJobValidationRequest. All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator """ _validation = { @@ -662,7 +808,7 @@ class ValidationInputRequest(msrest.serialization.Model): } _subtype_map = { - 'validation_type': {'ValidateAddress': 'ValidateAddress', 'ValidateCreateOrderLimit': 'CreateOrderLimitForSubscriptionValidationRequest', 'ValidateDataDestinationDetails': 'DataDestinationDetailsValidationRequest', 'ValidatePreferences': 'PreferencesValidationRequest', 'ValidateSkuAvailability': 'SkuAvailabilityValidationRequest', 'ValidateSubscriptionIsAllowedToCreateJob': 'SubscriptionIsAllowedToCreateJobValidationRequest'} + 'validation_type': {'ValidateAddress': 'ValidateAddress', 'ValidateCreateOrderLimit': 'CreateOrderLimitForSubscriptionValidationRequest', 'ValidateDataTransferDetails': 'DataTransferDetailsValidationRequest', 'ValidatePreferences': 'PreferencesValidationRequest', 'ValidateSkuAvailability': 'SkuAvailabilityValidationRequest', 'ValidateSubscriptionIsAllowedToCreateJob': 'SubscriptionIsAllowedToCreateJobValidationRequest'} } def __init__( @@ -679,13 +825,13 @@ class CreateOrderLimitForSubscriptionValidationRequest(ValidationInputRequest): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :param device_type: Required. Device type to be used for the job. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type device_type: str or ~azure.mgmt.databox.models.SkuName + :type device_type: str or ~data_box_management_client.models.SkuName """ _validation = { @@ -717,15 +863,15 @@ class CreateOrderLimitForSubscriptionValidationResponseProperties(ValidationInpu All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". 
- :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar status: Create order limit validation status. Possible values include: "Valid", "Invalid", "Skipped". - :vartype status: str or ~azure.mgmt.databox.models.ValidationStatus + :vartype status: str or ~data_box_management_client.models.ValidationStatus """ _validation = { @@ -736,7 +882,7 @@ class CreateOrderLimitForSubscriptionValidationResponseProperties(ValidationInpu _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'status': {'key': 'status', 'type': 'str'}, } @@ -749,6 +895,50 @@ def __init__( self.status = None +class DataAccountDetails(msrest.serialization.Model): + """Account details of the data to be transferred. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ManagedDiskDetails, StorageAccountDetails. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Account Type of the data to be transferred.Constant filled + by server. Possible values include: "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param share_password: Password for all the shares to be created on the device. Should not be + passed for TransferType:ExportFromAzure jobs. If this is not passed, the service will generate + password itself. This will not be returned in Get Call. Password Requirements : Password must + be minimum of 12 and maximum of 64 characters. Password must have at least one uppercase + alphabet, one number and one special character. Password cannot have the following characters : + IilLoO0 Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. + :type share_password: str + """ + + _validation = { + 'data_account_type': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'share_password': {'key': 'sharePassword', 'type': 'str'}, + } + + _subtype_map = { + 'data_account_type': {'ManagedDisk': 'ManagedDiskDetails', 'StorageAccount': 'StorageAccountDetails'} + } + + def __init__( + self, + *, + share_password: Optional[str] = None, + **kwargs + ): + super(DataAccountDetails, self).__init__(**kwargs) + self.data_account_type = None # type: Optional[str] + self.share_password = share_password + + class DataBoxAccountCopyLogDetails(CopyLogDetails): """Copy log details for a storage account of a DataBox job. @@ -758,23 +948,28 @@ class DataBoxAccountCopyLogDetails(CopyLogDetails): :param copy_log_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type copy_log_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :ivar account_name: Destination account name. + :type copy_log_details_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar account_name: Account name. :vartype account_name: str :ivar copy_log_link: Link for copy logs. 
:vartype copy_log_link: str + :ivar copy_verbose_log_link: Link for copy verbose logs. This will be set only when + LogCollectionLevel is set to Verbose. + :vartype copy_verbose_log_link: str """ _validation = { 'copy_log_details_type': {'required': True}, 'account_name': {'readonly': True}, 'copy_log_link': {'readonly': True}, + 'copy_verbose_log_link': {'readonly': True}, } _attribute_map = { 'copy_log_details_type': {'key': 'copyLogDetailsType', 'type': 'str'}, 'account_name': {'key': 'accountName', 'type': 'str'}, 'copy_log_link': {'key': 'copyLogLink', 'type': 'str'}, + 'copy_verbose_log_link': {'key': 'copyVerboseLogLink', 'type': 'str'}, } def __init__( @@ -785,6 +980,7 @@ def __init__( self.copy_log_details_type = 'DataBox' # type: str self.account_name = None self.copy_log_link = None + self.copy_verbose_log_link = None class DataBoxDiskCopyLogDetails(CopyLogDetails): @@ -796,7 +992,7 @@ class DataBoxDiskCopyLogDetails(CopyLogDetails): :param copy_log_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type copy_log_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator + :type copy_log_details_type: str or ~data_box_management_client.models.ClassDiscriminator :ivar disk_serial_number: Disk Serial Number. :vartype disk_serial_number: str :ivar error_log_link: Link for copy error logs. @@ -844,7 +1040,7 @@ class DataBoxDiskCopyProgress(msrest.serialization.Model): :ivar status: The Status of the copy. Possible values include: "NotStarted", "InProgress", "Completed", "CompletedWithErrors", "Failed", "NotReturned", "HardwareError", "DeviceFormatted", "DeviceMetadataModified", "StorageAccountNotAccessible", "UnsupportedData". - :vartype status: str or ~azure.mgmt.databox.models.CopyStatus + :vartype status: str or ~data_box_management_client.models.CopyStatus """ _validation = { @@ -882,64 +1078,73 @@ class JobDetails(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param expected_data_size_in_terabytes: The expected size of the data, which needs to be - transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int :ivar job_stages: List of stages that run in the job. - :vartype job_stages: list[~azure.mgmt.databox.models.JobStages] + :vartype job_stages: list[~data_box_management_client.models.JobStages] :param contact_details: Required. Contact details for notification and shipping. - :type contact_details: ~azure.mgmt.databox.models.ContactDetails - :param shipping_address: Required. Shipping address of the customer. - :type shipping_address: ~azure.mgmt.databox.models.ShippingAddress + :type contact_details: ~data_box_management_client.models.ContactDetails + :param shipping_address: Shipping address of the customer. + :type shipping_address: ~data_box_management_client.models.ShippingAddress :ivar delivery_package: Delivery package shipping details. - :vartype delivery_package: ~azure.mgmt.databox.models.PackageShippingDetails + :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails :ivar return_package: Return package shipping details. - :vartype return_package: ~azure.mgmt.databox.models.PackageShippingDetails - :param destination_account_details: Required. Destination account details. - :type destination_account_details: list[~azure.mgmt.databox.models.DestinationAccountDetails] - :ivar error_details: Error details for failure. This is optional. 
- :vartype error_details: list[~azure.mgmt.databox.models.JobErrorDetails] + :vartype return_package: ~data_box_management_client.models.PackageShippingDetails + :param data_import_details: Details of the data to be imported into azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] :param job_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator + :type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator :param preferences: Preferences for the order. - :type preferences: ~azure.mgmt.databox.models.Preferences + :type preferences: ~data_box_management_client.models.Preferences :ivar copy_log_details: List of copy log details. - :vartype copy_log_details: list[~azure.mgmt.databox.models.CopyLogDetails] + :vartype copy_log_details: list[~data_box_management_client.models.CopyLogDetails] :ivar reverse_shipment_label_sas_key: Shared access key to download the return shipment label. :vartype reverse_shipment_label_sas_key: str :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. :vartype chain_of_custody_sas_key: str + :param key_encryption_key: Details about which key encryption type is being used. + :type key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_tera_bytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. + :type expected_data_size_in_tera_bytes: int + :ivar actions: Available actions on the job. + :vartype actions: list[str or ~data_box_management_client.models.CustomerResolutionCode] + :ivar last_mitigation_action_on_job: Last mitigation action performed on the job. 
+ :vartype last_mitigation_action_on_job: + ~data_box_management_client.models.LastMitigationActionOnJob """ _validation = { 'job_stages': {'readonly': True}, 'contact_details': {'required': True}, - 'shipping_address': {'required': True}, 'delivery_package': {'readonly': True}, 'return_package': {'readonly': True}, - 'destination_account_details': {'required': True}, - 'error_details': {'readonly': True}, 'job_details_type': {'required': True}, 'copy_log_details': {'readonly': True}, 'reverse_shipment_label_sas_key': {'readonly': True}, 'chain_of_custody_sas_key': {'readonly': True}, + 'actions': {'readonly': True}, + 'last_mitigation_action_on_job': {'readonly': True}, } _attribute_map = { - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, 'preferences': {'key': 'preferences', 'type': 'Preferences'}, 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_tera_bytes': {'key': 'expectedDataSizeInTeraBytes', 'type': 'int'}, + 'actions': {'key': 'actions', 'type': '[str]'}, + 'last_mitigation_action_on_job': {'key': 'lastMitigationActionOnJob', 'type': 'LastMitigationActionOnJob'}, } _subtype_map = { @@ -950,26 +1155,31 @@ def __init__( self, *, contact_details: "ContactDetails", - shipping_address: "ShippingAddress", - destination_account_details: List["DestinationAccountDetails"], - expected_data_size_in_terabytes: Optional[int] = None, + shipping_address: Optional["ShippingAddress"] = None, + data_import_details: Optional[List["DataImportDetails"]] = None, + data_export_details: Optional[List["DataExportDetails"]] = None, preferences: Optional["Preferences"] = None, + key_encryption_key: Optional["KeyEncryptionKey"] = None, + expected_data_size_in_tera_bytes: Optional[int] = None, **kwargs ): super(JobDetails, self).__init__(**kwargs) - self.expected_data_size_in_terabytes = expected_data_size_in_terabytes self.job_stages = None self.contact_details = contact_details self.shipping_address = shipping_address self.delivery_package = None self.return_package = None - self.destination_account_details = destination_account_details - self.error_details = None + self.data_import_details = data_import_details + self.data_export_details = data_export_details self.job_details_type = None # type: Optional[str] self.preferences = preferences self.copy_log_details = None self.reverse_shipment_label_sas_key = None self.chain_of_custody_sas_key = None + self.key_encryption_key = key_encryption_key + 
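# NOTE (illustrative sketch, not part of the generated patch): job details now
# take data_import_details / data_export_details instead of
# destination_account_details, shipping_address becomes optional, and the size
# hint is renamed expected_data_size_in_tera_bytes. Building details for an
# import job could look like this; ContactDetails, ShippingAddress and
# StorageAccountDetails live elsewhere in this models module, their signatures
# and the import path are assumed, and every value shown is a placeholder.
from azure.mgmt.databox import models

details = models.DataBoxJobDetails(
    contact_details=models.ContactDetails(
        contact_name="<name>", phone="<phone>", email_list=["<email>"]),
    shipping_address=models.ShippingAddress(
        street_address1="<street>", city="<city>", state_or_province="<state>",
        country="<country>", postal_code="<postal-code>"),
    data_import_details=[
        models.DataImportDetails(
            account_details=models.StorageAccountDetails(
                storage_account_id="<storage-account-resource-id>"))],
    expected_data_size_in_tera_bytes=1,
)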
self.expected_data_size_in_tera_bytes = expected_data_size_in_tera_bytes + self.actions = None + self.last_mitigation_action_on_job = None class DataBoxDiskJobDetails(JobDetails): @@ -979,40 +1189,47 @@ class DataBoxDiskJobDetails(JobDetails): All required parameters must be populated in order to send to Azure. - :param expected_data_size_in_terabytes: The expected size of the data, which needs to be - transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int :ivar job_stages: List of stages that run in the job. - :vartype job_stages: list[~azure.mgmt.databox.models.JobStages] + :vartype job_stages: list[~data_box_management_client.models.JobStages] :param contact_details: Required. Contact details for notification and shipping. - :type contact_details: ~azure.mgmt.databox.models.ContactDetails - :param shipping_address: Required. Shipping address of the customer. - :type shipping_address: ~azure.mgmt.databox.models.ShippingAddress + :type contact_details: ~data_box_management_client.models.ContactDetails + :param shipping_address: Shipping address of the customer. + :type shipping_address: ~data_box_management_client.models.ShippingAddress :ivar delivery_package: Delivery package shipping details. - :vartype delivery_package: ~azure.mgmt.databox.models.PackageShippingDetails + :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails :ivar return_package: Return package shipping details. - :vartype return_package: ~azure.mgmt.databox.models.PackageShippingDetails - :param destination_account_details: Required. Destination account details. - :type destination_account_details: list[~azure.mgmt.databox.models.DestinationAccountDetails] - :ivar error_details: Error details for failure. This is optional. - :vartype error_details: list[~azure.mgmt.databox.models.JobErrorDetails] + :vartype return_package: ~data_box_management_client.models.PackageShippingDetails + :param data_import_details: Details of the data to be imported into azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] :param job_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator + :type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator :param preferences: Preferences for the order. - :type preferences: ~azure.mgmt.databox.models.Preferences + :type preferences: ~data_box_management_client.models.Preferences :ivar copy_log_details: List of copy log details. - :vartype copy_log_details: list[~azure.mgmt.databox.models.CopyLogDetails] + :vartype copy_log_details: list[~data_box_management_client.models.CopyLogDetails] :ivar reverse_shipment_label_sas_key: Shared access key to download the return shipment label. :vartype reverse_shipment_label_sas_key: str :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. :vartype chain_of_custody_sas_key: str + :param key_encryption_key: Details about which key encryption type is being used. + :type key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_tera_bytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. 
+ :type expected_data_size_in_tera_bytes: int + :ivar actions: Available actions on the job. + :vartype actions: list[str or ~data_box_management_client.models.CustomerResolutionCode] + :ivar last_mitigation_action_on_job: Last mitigation action performed on the job. + :vartype last_mitigation_action_on_job: + ~data_box_management_client.models.LastMitigationActionOnJob :param preferred_disks: User preference on what size disks are needed for the job. The map is from the disk size in TB to the count. Eg. {2,5} means 5 disks of 2 TB size. Key is string but will be checked against an int. :type preferred_disks: dict[str, int] :ivar copy_progress: Copy progress per disk. - :vartype copy_progress: list[~azure.mgmt.databox.models.DataBoxDiskCopyProgress] + :vartype copy_progress: list[~data_box_management_client.models.DataBoxDiskCopyProgress] :ivar disks_and_size_details: Contains the map of disk serial number to the disk size being used for the job. Is returned only after the disks are shipped to the customer. :vartype disks_and_size_details: dict[str, int] @@ -1023,33 +1240,35 @@ class DataBoxDiskJobDetails(JobDetails): _validation = { 'job_stages': {'readonly': True}, 'contact_details': {'required': True}, - 'shipping_address': {'required': True}, 'delivery_package': {'readonly': True}, 'return_package': {'readonly': True}, - 'destination_account_details': {'required': True}, - 'error_details': {'readonly': True}, 'job_details_type': {'required': True}, 'copy_log_details': {'readonly': True}, 'reverse_shipment_label_sas_key': {'readonly': True}, 'chain_of_custody_sas_key': {'readonly': True}, + 'actions': {'readonly': True}, + 'last_mitigation_action_on_job': {'readonly': True}, 'copy_progress': {'readonly': True}, 'disks_and_size_details': {'readonly': True}, } _attribute_map = { - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, 'preferences': {'key': 'preferences', 'type': 'Preferences'}, 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_tera_bytes': {'key': 'expectedDataSizeInTeraBytes', 'type': 'int'}, + 'actions': {'key': 'actions', 'type': '[str]'}, + 'last_mitigation_action_on_job': {'key': 'lastMitigationActionOnJob', 'type': 'LastMitigationActionOnJob'}, 'preferred_disks': {'key': 'preferredDisks', 'type': '{int}'}, 'copy_progress': {'key': 'copyProgress', 'type': '[DataBoxDiskCopyProgress]'}, 'disks_and_size_details': {'key': 'disksAndSizeDetails', 'type': '{int}'}, @@ -1060,15 
+1279,17 @@ def __init__( self, *, contact_details: "ContactDetails", - shipping_address: "ShippingAddress", - destination_account_details: List["DestinationAccountDetails"], - expected_data_size_in_terabytes: Optional[int] = None, + shipping_address: Optional["ShippingAddress"] = None, + data_import_details: Optional[List["DataImportDetails"]] = None, + data_export_details: Optional[List["DataExportDetails"]] = None, preferences: Optional["Preferences"] = None, + key_encryption_key: Optional["KeyEncryptionKey"] = None, + expected_data_size_in_tera_bytes: Optional[int] = None, preferred_disks: Optional[Dict[str, int]] = None, passkey: Optional[str] = None, **kwargs ): - super(DataBoxDiskJobDetails, self).__init__(expected_data_size_in_terabytes=expected_data_size_in_terabytes, contact_details=contact_details, shipping_address=shipping_address, destination_account_details=destination_account_details, preferences=preferences, **kwargs) + super(DataBoxDiskJobDetails, self).__init__(contact_details=contact_details, shipping_address=shipping_address, data_import_details=data_import_details, data_export_details=data_export_details, preferences=preferences, key_encryption_key=key_encryption_key, expected_data_size_in_tera_bytes=expected_data_size_in_tera_bytes, **kwargs) self.job_details_type = 'DataBoxDisk' # type: str self.preferred_disks = preferred_disks self.copy_progress = None @@ -1082,22 +1303,29 @@ class JobSecrets(msrest.serialization.Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: DataboxJobSecrets, DataBoxDiskJobSecrets, DataBoxHeavyJobSecrets. + Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_secrets_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :param dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. - :type dc_access_security_code: ~azure.mgmt.databox.models.DcAccessSecurityCode + :type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. + :vartype error: ~data_box_management_client.models.CloudError """ _validation = { 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, } _attribute_map = { 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, } _subtype_map = { @@ -1106,13 +1334,12 @@ class JobSecrets(msrest.serialization.Model): def __init__( self, - *, - dc_access_security_code: Optional["DcAccessSecurityCode"] = None, **kwargs ): super(JobSecrets, self).__init__(**kwargs) self.job_secrets_type = None # type: Optional[str] - self.dc_access_security_code = dc_access_security_code + self.dc_access_security_code = None + self.error = None class DataBoxDiskJobSecrets(JobSecrets): @@ -1124,11 +1351,13 @@ class DataBoxDiskJobSecrets(JobSecrets): :param job_secrets_type: Required. 
Used to indicate what type of job secrets object.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_secrets_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :param dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. - :type dc_access_security_code: ~azure.mgmt.databox.models.DcAccessSecurityCode + :type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. + :vartype error: ~data_box_management_client.models.CloudError :ivar disk_secrets: Contains the list of secrets object for that device. - :vartype disk_secrets: list[~azure.mgmt.databox.models.DiskSecret] + :vartype disk_secrets: list[~data_box_management_client.models.DiskSecret] :ivar pass_key: PassKey for the disk Job. :vartype pass_key: str :ivar is_passkey_user_defined: Whether passkey was provided by user. @@ -1137,6 +1366,8 @@ class DataBoxDiskJobSecrets(JobSecrets): _validation = { 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, 'disk_secrets': {'readonly': True}, 'pass_key': {'readonly': True}, 'is_passkey_user_defined': {'readonly': True}, @@ -1145,6 +1376,7 @@ class DataBoxDiskJobSecrets(JobSecrets): _attribute_map = { 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'disk_secrets': {'key': 'diskSecrets', 'type': '[DiskSecret]'}, 'pass_key': {'key': 'passKey', 'type': 'str'}, 'is_passkey_user_defined': {'key': 'isPasskeyUserDefined', 'type': 'bool'}, @@ -1152,11 +1384,9 @@ class DataBoxDiskJobSecrets(JobSecrets): def __init__( self, - *, - dc_access_security_code: Optional["DcAccessSecurityCode"] = None, **kwargs ): - super(DataBoxDiskJobSecrets, self).__init__(dc_access_security_code=dc_access_security_code, **kwargs) + super(DataBoxDiskJobSecrets, self).__init__(**kwargs) self.job_secrets_type = 'DataBoxDisk' # type: str self.disk_secrets = None self.pass_key = None @@ -1172,23 +1402,28 @@ class DataBoxHeavyAccountCopyLogDetails(CopyLogDetails): :param copy_log_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type copy_log_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :ivar account_name: Destination account name. + :type copy_log_details_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar account_name: Account name. :vartype account_name: str :ivar copy_log_link: Link for copy logs. :vartype copy_log_link: list[str] + :ivar copy_verbose_log_link: Link for copy verbose logs. This will be set only when the + LogCollectionLevel is set to verbose. 
+ :vartype copy_verbose_log_link: list[str] """ _validation = { 'copy_log_details_type': {'required': True}, 'account_name': {'readonly': True}, 'copy_log_link': {'readonly': True}, + 'copy_verbose_log_link': {'readonly': True}, } _attribute_map = { 'copy_log_details_type': {'key': 'copyLogDetailsType', 'type': 'str'}, 'account_name': {'key': 'accountName', 'type': 'str'}, 'copy_log_link': {'key': 'copyLogLink', 'type': '[str]'}, + 'copy_verbose_log_link': {'key': 'copyVerboseLogLink', 'type': '[str]'}, } def __init__( @@ -1199,6 +1434,7 @@ def __init__( self.copy_log_details_type = 'DataBoxHeavy' # type: str self.account_name = None self.copy_log_link = None + self.copy_verbose_log_link = None class DataBoxHeavyJobDetails(JobDetails): @@ -1208,69 +1444,83 @@ class DataBoxHeavyJobDetails(JobDetails): All required parameters must be populated in order to send to Azure. - :param expected_data_size_in_terabytes: The expected size of the data, which needs to be - transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int :ivar job_stages: List of stages that run in the job. - :vartype job_stages: list[~azure.mgmt.databox.models.JobStages] + :vartype job_stages: list[~data_box_management_client.models.JobStages] :param contact_details: Required. Contact details for notification and shipping. - :type contact_details: ~azure.mgmt.databox.models.ContactDetails - :param shipping_address: Required. Shipping address of the customer. - :type shipping_address: ~azure.mgmt.databox.models.ShippingAddress + :type contact_details: ~data_box_management_client.models.ContactDetails + :param shipping_address: Shipping address of the customer. + :type shipping_address: ~data_box_management_client.models.ShippingAddress :ivar delivery_package: Delivery package shipping details. - :vartype delivery_package: ~azure.mgmt.databox.models.PackageShippingDetails + :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails :ivar return_package: Return package shipping details. - :vartype return_package: ~azure.mgmt.databox.models.PackageShippingDetails - :param destination_account_details: Required. Destination account details. - :type destination_account_details: list[~azure.mgmt.databox.models.DestinationAccountDetails] - :ivar error_details: Error details for failure. This is optional. - :vartype error_details: list[~azure.mgmt.databox.models.JobErrorDetails] + :vartype return_package: ~data_box_management_client.models.PackageShippingDetails + :param data_import_details: Details of the data to be imported into azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] :param job_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator + :type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator :param preferences: Preferences for the order. - :type preferences: ~azure.mgmt.databox.models.Preferences + :type preferences: ~data_box_management_client.models.Preferences :ivar copy_log_details: List of copy log details. 
- :vartype copy_log_details: list[~azure.mgmt.databox.models.CopyLogDetails] + :vartype copy_log_details: list[~data_box_management_client.models.CopyLogDetails] :ivar reverse_shipment_label_sas_key: Shared access key to download the return shipment label. :vartype reverse_shipment_label_sas_key: str :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. :vartype chain_of_custody_sas_key: str + :param key_encryption_key: Details about which key encryption type is being used. + :type key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_tera_bytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. + :type expected_data_size_in_tera_bytes: int + :ivar actions: Available actions on the job. + :vartype actions: list[str or ~data_box_management_client.models.CustomerResolutionCode] + :ivar last_mitigation_action_on_job: Last mitigation action performed on the job. + :vartype last_mitigation_action_on_job: + ~data_box_management_client.models.LastMitigationActionOnJob :ivar copy_progress: Copy progress per account. - :vartype copy_progress: list[~azure.mgmt.databox.models.CopyProgress] - :param device_password: Set Device password for unlocking Databox Heavy. + :vartype copy_progress: list[~data_box_management_client.models.CopyProgress] + :param device_password: Set Device password for unlocking Databox Heavy. Should not be passed + for TransferType:ExportFromAzure jobs. If this is not passed, the service will generate + password itself. This will not be returned in Get Call. Password Requirements : Password must + be minimum of 12 and maximum of 64 characters. Password must have at least one uppercase + alphabet, one number and one special character. Password cannot have the following characters : + IilLoO0 Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. 
:type device_password: str """ _validation = { 'job_stages': {'readonly': True}, 'contact_details': {'required': True}, - 'shipping_address': {'required': True}, 'delivery_package': {'readonly': True}, 'return_package': {'readonly': True}, - 'destination_account_details': {'required': True}, - 'error_details': {'readonly': True}, 'job_details_type': {'required': True}, 'copy_log_details': {'readonly': True}, 'reverse_shipment_label_sas_key': {'readonly': True}, 'chain_of_custody_sas_key': {'readonly': True}, + 'actions': {'readonly': True}, + 'last_mitigation_action_on_job': {'readonly': True}, 'copy_progress': {'readonly': True}, } _attribute_map = { - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, 'preferences': {'key': 'preferences', 'type': 'Preferences'}, 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_tera_bytes': {'key': 'expectedDataSizeInTeraBytes', 'type': 'int'}, + 'actions': {'key': 'actions', 'type': '[str]'}, + 'last_mitigation_action_on_job': {'key': 'lastMitigationActionOnJob', 'type': 'LastMitigationActionOnJob'}, 'copy_progress': {'key': 'copyProgress', 'type': '[CopyProgress]'}, 'device_password': {'key': 'devicePassword', 'type': 'str'}, } @@ -1279,14 +1529,16 @@ def __init__( self, *, contact_details: "ContactDetails", - shipping_address: "ShippingAddress", - destination_account_details: List["DestinationAccountDetails"], - expected_data_size_in_terabytes: Optional[int] = None, + shipping_address: Optional["ShippingAddress"] = None, + data_import_details: Optional[List["DataImportDetails"]] = None, + data_export_details: Optional[List["DataExportDetails"]] = None, preferences: Optional["Preferences"] = None, + key_encryption_key: Optional["KeyEncryptionKey"] = None, + expected_data_size_in_tera_bytes: Optional[int] = None, device_password: Optional[str] = None, **kwargs ): - super(DataBoxHeavyJobDetails, self).__init__(expected_data_size_in_terabytes=expected_data_size_in_terabytes, contact_details=contact_details, shipping_address=shipping_address, destination_account_details=destination_account_details, preferences=preferences, **kwargs) + super(DataBoxHeavyJobDetails, self).__init__(contact_details=contact_details, shipping_address=shipping_address, data_import_details=data_import_details, data_export_details=data_export_details, preferences=preferences, key_encryption_key=key_encryption_key, expected_data_size_in_tera_bytes=expected_data_size_in_tera_bytes, 
**kwargs) self.job_details_type = 'DataBoxHeavy' # type: str self.copy_progress = None self.device_password = device_password @@ -1301,31 +1553,34 @@ class DataBoxHeavyJobSecrets(JobSecrets): :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_secrets_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :param dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. - :type dc_access_security_code: ~azure.mgmt.databox.models.DcAccessSecurityCode + :type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. + :vartype error: ~data_box_management_client.models.CloudError :ivar cabinet_pod_secrets: Contains the list of secret objects for a databox heavy job. - :vartype cabinet_pod_secrets: list[~azure.mgmt.databox.models.DataBoxHeavySecret] + :vartype cabinet_pod_secrets: list[~data_box_management_client.models.DataBoxHeavySecret] """ _validation = { 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, 'cabinet_pod_secrets': {'readonly': True}, } _attribute_map = { 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'cabinet_pod_secrets': {'key': 'cabinetPodSecrets', 'type': '[DataBoxHeavySecret]'}, } def __init__( self, - *, - dc_access_security_code: Optional["DcAccessSecurityCode"] = None, **kwargs ): - super(DataBoxHeavyJobSecrets, self).__init__(dc_access_security_code=dc_access_security_code, **kwargs) + super(DataBoxHeavyJobSecrets, self).__init__(**kwargs) self.job_secrets_type = 'DataBoxHeavy' # type: str self.cabinet_pod_secrets = None @@ -1340,12 +1595,14 @@ class DataBoxHeavySecret(msrest.serialization.Model): :ivar device_password: Password for out of the box experience on device. :vartype device_password: str :ivar network_configurations: Network configuration of the appliance. - :vartype network_configurations: list[~azure.mgmt.databox.models.ApplianceNetworkConfiguration] + :vartype network_configurations: + list[~data_box_management_client.models.ApplianceNetworkConfiguration] :ivar encoded_validation_cert_pub_key: The base 64 encoded public key to authenticate with the device. :vartype encoded_validation_cert_pub_key: str :ivar account_credential_details: Per account level access credentials. - :vartype account_credential_details: list[~azure.mgmt.databox.models.AccountCredentialDetails] + :vartype account_credential_details: + list[~data_box_management_client.models.AccountCredentialDetails] """ _validation = { @@ -1383,69 +1640,83 @@ class DataBoxJobDetails(JobDetails): All required parameters must be populated in order to send to Azure. - :param expected_data_size_in_terabytes: The expected size of the data, which needs to be - transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int :ivar job_stages: List of stages that run in the job. - :vartype job_stages: list[~azure.mgmt.databox.models.JobStages] + :vartype job_stages: list[~data_box_management_client.models.JobStages] :param contact_details: Required. 
Contact details for notification and shipping. - :type contact_details: ~azure.mgmt.databox.models.ContactDetails - :param shipping_address: Required. Shipping address of the customer. - :type shipping_address: ~azure.mgmt.databox.models.ShippingAddress + :type contact_details: ~data_box_management_client.models.ContactDetails + :param shipping_address: Shipping address of the customer. + :type shipping_address: ~data_box_management_client.models.ShippingAddress :ivar delivery_package: Delivery package shipping details. - :vartype delivery_package: ~azure.mgmt.databox.models.PackageShippingDetails + :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails :ivar return_package: Return package shipping details. - :vartype return_package: ~azure.mgmt.databox.models.PackageShippingDetails - :param destination_account_details: Required. Destination account details. - :type destination_account_details: list[~azure.mgmt.databox.models.DestinationAccountDetails] - :ivar error_details: Error details for failure. This is optional. - :vartype error_details: list[~azure.mgmt.databox.models.JobErrorDetails] + :vartype return_package: ~data_box_management_client.models.PackageShippingDetails + :param data_import_details: Details of the data to be imported into azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] :param job_details_type: Required. Indicates the type of job details.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator + :type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator :param preferences: Preferences for the order. - :type preferences: ~azure.mgmt.databox.models.Preferences + :type preferences: ~data_box_management_client.models.Preferences :ivar copy_log_details: List of copy log details. - :vartype copy_log_details: list[~azure.mgmt.databox.models.CopyLogDetails] + :vartype copy_log_details: list[~data_box_management_client.models.CopyLogDetails] :ivar reverse_shipment_label_sas_key: Shared access key to download the return shipment label. :vartype reverse_shipment_label_sas_key: str :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. :vartype chain_of_custody_sas_key: str + :param key_encryption_key: Details about which key encryption type is being used. + :type key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_tera_bytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. + :type expected_data_size_in_tera_bytes: int + :ivar actions: Available actions on the job. + :vartype actions: list[str or ~data_box_management_client.models.CustomerResolutionCode] + :ivar last_mitigation_action_on_job: Last mitigation action performed on the job. + :vartype last_mitigation_action_on_job: + ~data_box_management_client.models.LastMitigationActionOnJob :ivar copy_progress: Copy progress per storage account. - :vartype copy_progress: list[~azure.mgmt.databox.models.CopyProgress] - :param device_password: Set Device password for unlocking Databox. + :vartype copy_progress: list[~data_box_management_client.models.CopyProgress] + :param device_password: Set Device password for unlocking Databox. 
Should not be passed for + TransferType:ExportFromAzure jobs. If this is not passed, the service will generate password + itself. This will not be returned in Get Call. Password Requirements : Password must be + minimum of 12 and maximum of 64 characters. Password must have at least one uppercase alphabet, + one number and one special character. Password cannot have the following characters : IilLoO0 + Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. :type device_password: str """ _validation = { 'job_stages': {'readonly': True}, 'contact_details': {'required': True}, - 'shipping_address': {'required': True}, 'delivery_package': {'readonly': True}, 'return_package': {'readonly': True}, - 'destination_account_details': {'required': True}, - 'error_details': {'readonly': True}, 'job_details_type': {'required': True}, 'copy_log_details': {'readonly': True}, 'reverse_shipment_label_sas_key': {'readonly': True}, 'chain_of_custody_sas_key': {'readonly': True}, + 'actions': {'readonly': True}, + 'last_mitigation_action_on_job': {'readonly': True}, 'copy_progress': {'readonly': True}, } _attribute_map = { - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, 'preferences': {'key': 'preferences', 'type': 'Preferences'}, 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_tera_bytes': {'key': 'expectedDataSizeInTeraBytes', 'type': 'int'}, + 'actions': {'key': 'actions', 'type': '[str]'}, + 'last_mitigation_action_on_job': {'key': 'lastMitigationActionOnJob', 'type': 'LastMitigationActionOnJob'}, 'copy_progress': {'key': 'copyProgress', 'type': '[CopyProgress]'}, 'device_password': {'key': 'devicePassword', 'type': 'str'}, } @@ -1454,14 +1725,16 @@ def __init__( self, *, contact_details: "ContactDetails", - shipping_address: "ShippingAddress", - destination_account_details: List["DestinationAccountDetails"], - expected_data_size_in_terabytes: Optional[int] = None, + shipping_address: Optional["ShippingAddress"] = None, + data_import_details: Optional[List["DataImportDetails"]] = None, + data_export_details: Optional[List["DataExportDetails"]] = None, preferences: Optional["Preferences"] = None, + key_encryption_key: Optional["KeyEncryptionKey"] = None, + expected_data_size_in_tera_bytes: Optional[int] = None, device_password: Optional[str] = None, **kwargs ): - super(DataBoxJobDetails, self).__init__(expected_data_size_in_terabytes=expected_data_size_in_terabytes, 
contact_details=contact_details, shipping_address=shipping_address, destination_account_details=destination_account_details, preferences=preferences, **kwargs) + super(DataBoxJobDetails, self).__init__(contact_details=contact_details, shipping_address=shipping_address, data_import_details=data_import_details, data_export_details=data_export_details, preferences=preferences, key_encryption_key=key_encryption_key, expected_data_size_in_tera_bytes=expected_data_size_in_tera_bytes, **kwargs) self.job_details_type = 'DataBox' # type: str self.copy_progress = None self.device_password = device_password @@ -1470,35 +1743,41 @@ def __init__( class DataboxJobSecrets(JobSecrets): """The secrets related to a databox job. + Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_secrets_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :param dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. - :type dc_access_security_code: ~azure.mgmt.databox.models.DcAccessSecurityCode + :type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. + :vartype error: ~data_box_management_client.models.CloudError :param pod_secrets: Contains the list of secret objects for a job. - :type pod_secrets: list[~azure.mgmt.databox.models.DataBoxSecret] + :type pod_secrets: list[~data_box_management_client.models.DataBoxSecret] """ _validation = { 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, } _attribute_map = { 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'pod_secrets': {'key': 'podSecrets', 'type': '[DataBoxSecret]'}, } def __init__( self, *, - dc_access_security_code: Optional["DcAccessSecurityCode"] = None, pod_secrets: Optional[List["DataBoxSecret"]] = None, **kwargs ): - super(DataboxJobSecrets, self).__init__(dc_access_security_code=dc_access_security_code, **kwargs) + super(DataboxJobSecrets, self).__init__(**kwargs) self.job_secrets_type = 'DataBox' # type: str self.pod_secrets = pod_secrets @@ -1511,13 +1790,14 @@ class ScheduleAvailabilityRequest(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param storage_location: Required. Location for data transfer. - For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api- - version=2018-01-01. + :param storage_location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. :type storage_location: str :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". 
- :type sku_name: str or ~azure.mgmt.databox.models.SkuName + :type sku_name: str or ~data_box_management_client.models.SkuName + :param country: Country in which storage location should be supported. + :type country: str """ _validation = { @@ -1528,6 +1808,7 @@ class ScheduleAvailabilityRequest(msrest.serialization.Model): _attribute_map = { 'storage_location': {'key': 'storageLocation', 'type': 'str'}, 'sku_name': {'key': 'skuName', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, } _subtype_map = { @@ -1538,11 +1819,13 @@ def __init__( self, *, storage_location: str, + country: Optional[str] = None, **kwargs ): super(ScheduleAvailabilityRequest, self).__init__(**kwargs) self.storage_location = storage_location self.sku_name = None # type: Optional[str] + self.country = country class DataBoxScheduleAvailabilityRequest(ScheduleAvailabilityRequest): @@ -1550,13 +1833,14 @@ class DataBoxScheduleAvailabilityRequest(ScheduleAvailabilityRequest): All required parameters must be populated in order to send to Azure. - :param storage_location: Required. Location for data transfer. - For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api- - version=2018-01-01. + :param storage_location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. :type storage_location: str :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type sku_name: str or ~azure.mgmt.databox.models.SkuName + :type sku_name: str or ~data_box_management_client.models.SkuName + :param country: Country in which storage location should be supported. + :type country: str """ _validation = { @@ -1567,15 +1851,17 @@ class DataBoxScheduleAvailabilityRequest(ScheduleAvailabilityRequest): _attribute_map = { 'storage_location': {'key': 'storageLocation', 'type': 'str'}, 'sku_name': {'key': 'skuName', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, } def __init__( self, *, storage_location: str, + country: Optional[str] = None, **kwargs ): - super(DataBoxScheduleAvailabilityRequest, self).__init__(storage_location=storage_location, **kwargs) + super(DataBoxScheduleAvailabilityRequest, self).__init__(storage_location=storage_location, country=country, **kwargs) self.sku_name = 'DataBox' # type: str @@ -1589,12 +1875,14 @@ class DataBoxSecret(msrest.serialization.Model): :ivar device_password: Password for out of the box experience on device. :vartype device_password: str :ivar network_configurations: Network configuration of the appliance. - :vartype network_configurations: list[~azure.mgmt.databox.models.ApplianceNetworkConfiguration] + :vartype network_configurations: + list[~data_box_management_client.models.ApplianceNetworkConfiguration] :ivar encoded_validation_cert_pub_key: The base 64 encoded public key to authenticate with the device. :vartype encoded_validation_cert_pub_key: str :ivar account_credential_details: Per account level access credentials. 
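# NOTE (illustrative sketch, not part of the generated patch): the schedule
# availability request gains an optional country alongside the required
# storage_location; sku_name remains a server-filled constant set by the
# concrete subclass. For example (import path assumed):
from azure.mgmt.databox import models

request = models.DataBoxScheduleAvailabilityRequest(
    storage_location="westus",
    country="US",   # new optional field introduced by this change
)
# The request is then passed to the service-level region configuration /
# availability operation; that operation's exact signature is not part of
# this hunk and is therefore not shown here.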
- :vartype account_credential_details: list[~azure.mgmt.databox.models.AccountCredentialDetails] + :vartype account_credential_details: + list[~data_box_management_client.models.AccountCredentialDetails] """ _validation = { @@ -1625,64 +1913,172 @@ def __init__( self.account_credential_details = None -class DataDestinationDetailsValidationRequest(ValidationInputRequest): - """Request to validate data destination details. +class DataExportDetails(msrest.serialization.Model): + """Details of the data to be used for exporting data from azure. + + All required parameters must be populated in order to send to Azure. + + :param transfer_configuration: Required. Configuration for the data transfer. + :type transfer_configuration: ~data_box_management_client.models.TransferConfiguration + :param log_collection_level: Level of the logs to be collected. Possible values include: + "Error", "Verbose". Default value: "Error". + :type log_collection_level: str or ~data_box_management_client.models.LogCollectionLevel + :param account_details: Required. Account details of the data to be transferred. + :type account_details: ~data_box_management_client.models.DataAccountDetails + """ + + _validation = { + 'transfer_configuration': {'required': True}, + 'account_details': {'required': True}, + } + + _attribute_map = { + 'transfer_configuration': {'key': 'transferConfiguration', 'type': 'TransferConfiguration'}, + 'log_collection_level': {'key': 'logCollectionLevel', 'type': 'str'}, + 'account_details': {'key': 'accountDetails', 'type': 'DataAccountDetails'}, + } + + def __init__( + self, + *, + transfer_configuration: "TransferConfiguration", + account_details: "DataAccountDetails", + log_collection_level: Optional[Union[str, "LogCollectionLevel"]] = "Error", + **kwargs + ): + super(DataExportDetails, self).__init__(**kwargs) + self.transfer_configuration = transfer_configuration + self.log_collection_level = log_collection_level + self.account_details = account_details + + +class DataImportDetails(msrest.serialization.Model): + """Details of the data to be used for importing data to azure. + + All required parameters must be populated in order to send to Azure. + + :param account_details: Required. Account details of the data to be transferred. + :type account_details: ~data_box_management_client.models.DataAccountDetails + """ + + _validation = { + 'account_details': {'required': True}, + } + + _attribute_map = { + 'account_details': {'key': 'accountDetails', 'type': 'DataAccountDetails'}, + } + + def __init__( + self, + *, + account_details: "DataAccountDetails", + **kwargs + ): + super(DataImportDetails, self).__init__(**kwargs) + self.account_details = account_details + + +class DataLocationToServiceLocationMap(msrest.serialization.Model): + """Map of data location to service location. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar data_location: Location of the data. + :vartype data_location: str + :ivar service_location: Location of the service. 
+ :vartype service_location: str + """ + + _validation = { + 'data_location': {'readonly': True}, + 'service_location': {'readonly': True}, + } + + _attribute_map = { + 'data_location': {'key': 'dataLocation', 'type': 'str'}, + 'service_location': {'key': 'serviceLocation', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DataLocationToServiceLocationMap, self).__init__(**kwargs) + self.data_location = None + self.service_location = None + + +class DataTransferDetailsValidationRequest(ValidationInputRequest): + """Request to validate export and import data details. All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator - :param destination_account_details: Required. Destination account details list. - :type destination_account_details: list[~azure.mgmt.databox.models.DestinationAccountDetails] - :param location: Required. Location of stamp or geo. - :type location: str + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + :param data_export_details: List of DataTransfer details to be used to export data from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] + :param data_import_details: List of DataTransfer details to be used to import data to azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param device_type: Required. Device type. Possible values include: "DataBox", "DataBoxDisk", + "DataBoxHeavy". + :type device_type: str or ~data_box_management_client.models.SkuName + :param transfer_type: Required. Type of the transfer. Possible values include: "ImportToAzure", + "ExportFromAzure". 
+ :type transfer_type: str or ~data_box_management_client.models.TransferType """ _validation = { 'validation_type': {'required': True}, - 'destination_account_details': {'required': True}, - 'location': {'required': True}, + 'device_type': {'required': True}, + 'transfer_type': {'required': True}, } _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'location': {'key': 'location', 'type': 'str'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'device_type': {'key': 'deviceType', 'type': 'str'}, + 'transfer_type': {'key': 'transferType', 'type': 'str'}, } def __init__( self, *, - destination_account_details: List["DestinationAccountDetails"], - location: str, + device_type: Union[str, "SkuName"], + transfer_type: Union[str, "TransferType"], + data_export_details: Optional[List["DataExportDetails"]] = None, + data_import_details: Optional[List["DataImportDetails"]] = None, **kwargs ): - super(DataDestinationDetailsValidationRequest, self).__init__(**kwargs) - self.validation_type = 'ValidateDataDestinationDetails' # type: str - self.destination_account_details = destination_account_details - self.location = location + super(DataTransferDetailsValidationRequest, self).__init__(**kwargs) + self.validation_type = 'ValidateDataTransferDetails' # type: str + self.data_export_details = data_export_details + self.data_import_details = data_import_details + self.device_type = device_type + self.transfer_type = transfer_type -class DataDestinationDetailsValidationResponseProperties(ValidationInputResponse): - """Properties of data destination details validation response. +class DataTransferDetailsValidationResponseProperties(ValidationInputResponse): + """Properties of data transfer details validation response. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error - :ivar status: Data destination details validation status. Possible values include: "Valid", + :vartype error: ~data_box_management_client.models.CloudError + :ivar status: Data transfer details validation status. Possible values include: "Valid", "Invalid", "Skipped". 
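The DataTransferDetailsValidationRequest above replaces the old DataDestinationDetailsValidationRequest: device_type and transfer_type are now required, and the destination list is split into optional data_export_details / data_import_details. A minimal sketch for an import validation, assuming the regenerated models are importable from the vendored SDK (the module path below is an assumption) and using StorageAccountDetails, which is defined further down in this file:

    # Module path is assumed; the regenerated SDK may expose the models elsewhere.
    from azext_databox.vendored_sdks.databox.models import (
        DataTransferDetailsValidationRequest,
        DataImportDetails,
        StorageAccountDetails,
    )

    request = DataTransferDetailsValidationRequest(
        device_type="DataBox",
        transfer_type="ImportToAzure",
        data_import_details=[
            DataImportDetails(
                account_details=StorageAccountDetails(
                    storage_account_id="<storage-account-arm-id>"  # placeholder
                )
            )
        ],
    )
    # validation_type is fixed to 'ValidateDataTransferDetails' by the constructor.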
- :vartype status: str or ~azure.mgmt.databox.models.ValidationStatus + :vartype status: str or ~data_box_management_client.models.ValidationStatus """ _validation = { @@ -1693,7 +2089,7 @@ class DataDestinationDetailsValidationResponseProperties(ValidationInputResponse _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'status': {'key': 'status', 'type': 'str'}, } @@ -1701,301 +2097,243 @@ def __init__( self, **kwargs ): - super(DataDestinationDetailsValidationResponseProperties, self).__init__(**kwargs) - self.validation_type = 'ValidateDataDestinationDetails' # type: str + super(DataTransferDetailsValidationResponseProperties, self).__init__(**kwargs) + self.validation_type = 'ValidateDataTransferDetails' # type: str self.status = None class DcAccessSecurityCode(msrest.serialization.Model): - """Dc Access Security code for device. + """Dc access security code. - :param forward_dc_access_code: Dc Access Code for dispatching from DC. - :type forward_dc_access_code: str - :param reverse_dc_access_code: Dc Access code for dropping off at DC. + :param reverse_dc_access_code: Reverse Dc access security code. :type reverse_dc_access_code: str + :param forward_dc_access_code: Forward Dc access security code. + :type forward_dc_access_code: str """ _attribute_map = { - 'forward_dc_access_code': {'key': 'forwardDcAccessCode', 'type': 'str'}, - 'reverse_dc_access_code': {'key': 'reverseDcAccessCode', 'type': 'str'}, + 'reverse_dc_access_code': {'key': 'reverseDCAccessCode', 'type': 'str'}, + 'forward_dc_access_code': {'key': 'forwardDCAccessCode', 'type': 'str'}, } def __init__( self, *, - forward_dc_access_code: Optional[str] = None, reverse_dc_access_code: Optional[str] = None, + forward_dc_access_code: Optional[str] = None, **kwargs ): super(DcAccessSecurityCode, self).__init__(**kwargs) - self.forward_dc_access_code = forward_dc_access_code self.reverse_dc_access_code = reverse_dc_access_code + self.forward_dc_access_code = forward_dc_access_code -class DestinationAccountDetails(msrest.serialization.Model): - """Details of the destination storage accounts. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DestinationManagedDiskDetails, DestinationStorageAccountDetails. +class Details(msrest.serialization.Model): + """Details. All required parameters must be populated in order to send to Azure. - :param data_destination_type: Required. Data Destination Type.Constant filled by server. - Possible values include: "StorageAccount", "ManagedDisk". - :type data_destination_type: str or ~azure.mgmt.databox.models.DataDestinationType - :param account_id: Arm Id of the destination where the data has to be moved. - :type account_id: str - :param share_password: Share password to be shared by all shares in SA. - :type share_password: str + :param code: Required. + :type code: str + :param message: Required. 
+ :type message: str """ _validation = { - 'data_destination_type': {'required': True}, + 'code': {'required': True}, + 'message': {'required': True}, } _attribute_map = { - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, - 'account_id': {'key': 'accountId', 'type': 'str'}, - 'share_password': {'key': 'sharePassword', 'type': 'str'}, - } - - _subtype_map = { - 'data_destination_type': {'ManagedDisk': 'DestinationManagedDiskDetails', 'StorageAccount': 'DestinationStorageAccountDetails'} + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, } def __init__( self, *, - account_id: Optional[str] = None, - share_password: Optional[str] = None, + code: str, + message: str, **kwargs ): - super(DestinationAccountDetails, self).__init__(**kwargs) - self.data_destination_type = None # type: Optional[str] - self.account_id = account_id - self.share_password = share_password + super(Details, self).__init__(**kwargs) + self.code = code + self.message = message -class DestinationManagedDiskDetails(DestinationAccountDetails): - """Details for the destination compute disks. +class DiskScheduleAvailabilityRequest(ScheduleAvailabilityRequest): + """Request body to get the availability for scheduling disk orders. All required parameters must be populated in order to send to Azure. - :param data_destination_type: Required. Data Destination Type.Constant filled by server. - Possible values include: "StorageAccount", "ManagedDisk". - :type data_destination_type: str or ~azure.mgmt.databox.models.DataDestinationType - :param account_id: Arm Id of the destination where the data has to be moved. - :type account_id: str - :param share_password: Share password to be shared by all shares in SA. - :type share_password: str - :param resource_group_id: Required. Destination Resource Group Id where the Compute disks - should be created. - :type resource_group_id: str - :param staging_storage_account_id: Required. Arm Id of the storage account that can be used to - copy the vhd for staging. - :type staging_storage_account_id: str + :param storage_location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. + :type storage_location: str + :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by + server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". + :type sku_name: str or ~data_box_management_client.models.SkuName + :param country: Country in which storage location should be supported. + :type country: str + :param expected_data_size_in_tera_bytes: Required. The expected size of the data, which needs + to be transferred in this job, in terabytes. 
+ :type expected_data_size_in_tera_bytes: int """ _validation = { - 'data_destination_type': {'required': True}, - 'resource_group_id': {'required': True}, - 'staging_storage_account_id': {'required': True}, - } - - _attribute_map = { - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, - 'account_id': {'key': 'accountId', 'type': 'str'}, - 'share_password': {'key': 'sharePassword', 'type': 'str'}, - 'resource_group_id': {'key': 'resourceGroupId', 'type': 'str'}, - 'staging_storage_account_id': {'key': 'stagingStorageAccountId', 'type': 'str'}, - } - - def __init__( - self, - *, - resource_group_id: str, - staging_storage_account_id: str, - account_id: Optional[str] = None, - share_password: Optional[str] = None, - **kwargs - ): - super(DestinationManagedDiskDetails, self).__init__(account_id=account_id, share_password=share_password, **kwargs) - self.data_destination_type = 'ManagedDisk' # type: str - self.resource_group_id = resource_group_id - self.staging_storage_account_id = staging_storage_account_id - - -class DestinationStorageAccountDetails(DestinationAccountDetails): - """Details for the destination storage account. - - All required parameters must be populated in order to send to Azure. - - :param data_destination_type: Required. Data Destination Type.Constant filled by server. - Possible values include: "StorageAccount", "ManagedDisk". - :type data_destination_type: str or ~azure.mgmt.databox.models.DataDestinationType - :param account_id: Arm Id of the destination where the data has to be moved. - :type account_id: str - :param share_password: Share password to be shared by all shares in SA. - :type share_password: str - :param storage_account_id: Required. Destination Storage Account Arm Id. - :type storage_account_id: str - """ - - _validation = { - 'data_destination_type': {'required': True}, - 'storage_account_id': {'required': True}, + 'storage_location': {'required': True}, + 'sku_name': {'required': True}, + 'expected_data_size_in_tera_bytes': {'required': True}, } _attribute_map = { - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, - 'account_id': {'key': 'accountId', 'type': 'str'}, - 'share_password': {'key': 'sharePassword', 'type': 'str'}, - 'storage_account_id': {'key': 'storageAccountId', 'type': 'str'}, + 'storage_location': {'key': 'storageLocation', 'type': 'str'}, + 'sku_name': {'key': 'skuName', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, + 'expected_data_size_in_tera_bytes': {'key': 'expectedDataSizeInTeraBytes', 'type': 'int'}, } def __init__( self, *, - storage_account_id: str, - account_id: Optional[str] = None, - share_password: Optional[str] = None, + storage_location: str, + expected_data_size_in_tera_bytes: int, + country: Optional[str] = None, **kwargs ): - super(DestinationStorageAccountDetails, self).__init__(account_id=account_id, share_password=share_password, **kwargs) - self.data_destination_type = 'StorageAccount' # type: str - self.storage_account_id = storage_account_id + super(DiskScheduleAvailabilityRequest, self).__init__(storage_location=storage_location, country=country, **kwargs) + self.sku_name = 'DataBoxDisk' # type: str + self.expected_data_size_in_tera_bytes = expected_data_size_in_tera_bytes -class DestinationToServiceLocationMap(msrest.serialization.Model): - """Map of destination location to service location. +class DiskSecret(msrest.serialization.Model): + """Contains all the secrets of a Disk. 
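DiskScheduleAvailabilityRequest now carries the optional country field shared by all schedule-availability requests, and the size parameter is spelled expected_data_size_in_tera_bytes. A minimal sketch, assuming the regenerated models are importable from the vendored SDK (module path is an assumption):

    # Module path is assumed; adjust to wherever the regenerated models live.
    from azext_databox.vendored_sdks.databox.models import DiskScheduleAvailabilityRequest

    request = DiskScheduleAvailabilityRequest(
        storage_location="westus",
        expected_data_size_in_tera_bytes=5,
        country="US",  # new optional field in this version
    )
    # sku_name is fixed to 'DataBoxDisk' by the subclass constructor.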
Variables are only populated by the server, and will be ignored when sending a request. - :ivar destination_location: Location of the destination. - :vartype destination_location: str - :ivar service_location: Location of the service. - :vartype service_location: str + :ivar disk_serial_number: Serial number of the assigned disk. + :vartype disk_serial_number: str + :ivar bit_locker_key: Bit Locker key of the disk which can be used to unlock the disk to copy + data. + :vartype bit_locker_key: str """ _validation = { - 'destination_location': {'readonly': True}, - 'service_location': {'readonly': True}, + 'disk_serial_number': {'readonly': True}, + 'bit_locker_key': {'readonly': True}, } _attribute_map = { - 'destination_location': {'key': 'destinationLocation', 'type': 'str'}, - 'service_location': {'key': 'serviceLocation', 'type': 'str'}, + 'disk_serial_number': {'key': 'diskSerialNumber', 'type': 'str'}, + 'bit_locker_key': {'key': 'bitLockerKey', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DestinationToServiceLocationMap, self).__init__(**kwargs) - self.destination_location = None - self.service_location = None - + super(DiskSecret, self).__init__(**kwargs) + self.disk_serial_number = None + self.bit_locker_key = None -class DiskScheduleAvailabilityRequest(ScheduleAvailabilityRequest): - """Request body to get the availability for scheduling disk orders. - All required parameters must be populated in order to send to Azure. +class EncryptionPreferences(msrest.serialization.Model): + """Preferences related to the Encryption. - :param storage_location: Required. Location for data transfer. - For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api- - version=2018-01-01. - :type storage_location: str - :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by - server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type sku_name: str or ~azure.mgmt.databox.models.SkuName - :param expected_data_size_in_terabytes: Required. The expected size of the data, which needs to - be transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int + :param double_encryption: Defines secondary layer of software-based encryption enablement. + Possible values include: "Enabled", "Disabled". Default value: "Disabled". + :type double_encryption: str or ~data_box_management_client.models.DoubleEncryption """ - _validation = { - 'storage_location': {'required': True}, - 'sku_name': {'required': True}, - 'expected_data_size_in_terabytes': {'required': True}, - } - _attribute_map = { - 'storage_location': {'key': 'storageLocation', 'type': 'str'}, - 'sku_name': {'key': 'skuName', 'type': 'str'}, - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, + 'double_encryption': {'key': 'doubleEncryption', 'type': 'str'}, } def __init__( self, *, - storage_location: str, - expected_data_size_in_terabytes: int, + double_encryption: Optional[Union[str, "DoubleEncryption"]] = "Disabled", **kwargs ): - super(DiskScheduleAvailabilityRequest, self).__init__(storage_location=storage_location, **kwargs) - self.sku_name = 'DataBoxDisk' # type: str - self.expected_data_size_in_terabytes = expected_data_size_in_terabytes + super(EncryptionPreferences, self).__init__(**kwargs) + self.double_encryption = double_encryption -class DiskSecret(msrest.serialization.Model): - """Contains all the secrets of a Disk. 
+class ErrorDetail(msrest.serialization.Model): + """ErrorDetail. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar disk_serial_number: Serial number of the assigned disk. - :vartype disk_serial_number: str - :ivar bit_locker_key: Bit Locker key of the disk which can be used to unlock the disk to copy - data. - :vartype bit_locker_key: str + :param code: Required. + :type code: str + :param message: Required. + :type message: str + :param details: + :type details: list[~data_box_management_client.models.Details] + :param target: + :type target: str """ _validation = { - 'disk_serial_number': {'readonly': True}, - 'bit_locker_key': {'readonly': True}, + 'code': {'required': True}, + 'message': {'required': True}, } _attribute_map = { - 'disk_serial_number': {'key': 'diskSerialNumber', 'type': 'str'}, - 'bit_locker_key': {'key': 'bitLockerKey', 'type': 'str'}, + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[Details]'}, + 'target': {'key': 'target', 'type': 'str'}, } def __init__( self, + *, + code: str, + message: str, + details: Optional[List["Details"]] = None, + target: Optional[str] = None, **kwargs ): - super(DiskSecret, self).__init__(**kwargs) - self.disk_serial_number = None - self.bit_locker_key = None + super(ErrorDetail, self).__init__(**kwargs) + self.code = code + self.message = message + self.details = details + self.target = target -class Error(msrest.serialization.Model): - """Top level error for the job. +class FilterFileDetails(msrest.serialization.Model): + """Details of the filter files to be used for data transfer. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar code: Error code that can be used to programmatically identify the error. - :vartype code: str - :ivar message: Describes the error in detail and provides debugging information. - :vartype message: str + :param filter_file_type: Required. Type of the filter file. Possible values include: + "AzureBlob", "AzureFile". + :type filter_file_type: str or ~data_box_management_client.models.FilterFileType + :param filter_file_path: Required. Path of the file that contains the details of all items to + transfer. + :type filter_file_path: str """ _validation = { - 'code': {'readonly': True}, - 'message': {'readonly': True}, + 'filter_file_type': {'required': True}, + 'filter_file_path': {'required': True}, } _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, + 'filter_file_type': {'key': 'filterFileType', 'type': 'str'}, + 'filter_file_path': {'key': 'filterFilePath', 'type': 'str'}, } def __init__( self, + *, + filter_file_type: Union[str, "FilterFileType"], + filter_file_path: str, **kwargs ): - super(Error, self).__init__(**kwargs) - self.code = None - self.message = None + super(FilterFileDetails, self).__init__(**kwargs) + self.filter_file_type = filter_file_type + self.filter_file_path = filter_file_path class HeavyScheduleAvailabilityRequest(ScheduleAvailabilityRequest): @@ -2003,13 +2341,14 @@ class HeavyScheduleAvailabilityRequest(ScheduleAvailabilityRequest): All required parameters must be populated in order to send to Azure. - :param storage_location: Required. Location for data transfer. 
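FilterFileDetails is new in this version and identifies a filter file listing the items to transfer. A small sketch under the same assumed import path, with a placeholder file path:

    from azext_databox.vendored_sdks.databox.models import FilterFileDetails  # path assumed

    filter_file = FilterFileDetails(
        filter_file_type="AzureBlob",
        filter_file_path="/container/filter-file.txt",  # placeholder path
    )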
- For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api- - version=2018-01-01. + :param storage_location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. :type storage_location: str :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type sku_name: str or ~azure.mgmt.databox.models.SkuName + :type sku_name: str or ~data_box_management_client.models.SkuName + :param country: Country in which storage location should be supported. + :type country: str """ _validation = { @@ -2020,77 +2359,65 @@ class HeavyScheduleAvailabilityRequest(ScheduleAvailabilityRequest): _attribute_map = { 'storage_location': {'key': 'storageLocation', 'type': 'str'}, 'sku_name': {'key': 'skuName', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, } def __init__( self, *, storage_location: str, + country: Optional[str] = None, **kwargs ): - super(HeavyScheduleAvailabilityRequest, self).__init__(storage_location=storage_location, **kwargs) + super(HeavyScheduleAvailabilityRequest, self).__init__(storage_location=storage_location, country=country, **kwargs) self.sku_name = 'DataBoxHeavy' # type: str -class JobDeliveryInfo(msrest.serialization.Model): - """Additional delivery info. +class IdentityProperties(msrest.serialization.Model): + """Managed identity properties. - :param scheduled_date_time: Scheduled date time. - :type scheduled_date_time: ~datetime.datetime + :param type: Managed service identity type. + :type type: str + :param user_assigned: User assigned identity properties. + :type user_assigned: ~data_box_management_client.models.UserAssignedProperties """ _attribute_map = { - 'scheduled_date_time': {'key': 'scheduledDateTime', 'type': 'iso-8601'}, + 'type': {'key': 'type', 'type': 'str'}, + 'user_assigned': {'key': 'userAssigned', 'type': 'UserAssignedProperties'}, } def __init__( self, *, - scheduled_date_time: Optional[datetime.datetime] = None, + type: Optional[str] = None, + user_assigned: Optional["UserAssignedProperties"] = None, **kwargs ): - super(JobDeliveryInfo, self).__init__(**kwargs) - self.scheduled_date_time = scheduled_date_time + super(IdentityProperties, self).__init__(**kwargs) + self.type = type + self.user_assigned = user_assigned -class JobErrorDetails(msrest.serialization.Model): - """Job Error Details for providing the information and recommended action. - - Variables are only populated by the server, and will be ignored when sending a request. +class JobDeliveryInfo(msrest.serialization.Model): + """Additional delivery info. - :ivar error_message: Message for the error. - :vartype error_message: str - :ivar error_code: Code for the error. - :vartype error_code: int - :ivar recommended_action: Recommended action for the error. - :vartype recommended_action: str - :ivar exception_message: Contains the non localized exception message. - :vartype exception_message: str + :param scheduled_date_time: Scheduled date time. 
+ :type scheduled_date_time: ~datetime.datetime """ - _validation = { - 'error_message': {'readonly': True}, - 'error_code': {'readonly': True}, - 'recommended_action': {'readonly': True}, - 'exception_message': {'readonly': True}, - } - _attribute_map = { - 'error_message': {'key': 'errorMessage', 'type': 'str'}, - 'error_code': {'key': 'errorCode', 'type': 'int'}, - 'recommended_action': {'key': 'recommendedAction', 'type': 'str'}, - 'exception_message': {'key': 'exceptionMessage', 'type': 'str'}, + 'scheduled_date_time': {'key': 'scheduledDateTime', 'type': 'iso-8601'}, } def __init__( self, + *, + scheduled_date_time: Optional[datetime.datetime] = None, **kwargs ): - super(JobErrorDetails, self).__init__(**kwargs) - self.error_message = None - self.error_code = None - self.recommended_action = None - self.exception_message = None + super(JobDeliveryInfo, self).__init__(**kwargs) + self.scheduled_date_time = scheduled_date_time class Resource(msrest.serialization.Model): @@ -2107,7 +2434,9 @@ class Resource(msrest.serialization.Model): can be used in viewing and grouping this resource (across resource groups). :type tags: dict[str, str] :param sku: Required. The sku type. - :type sku: ~azure.mgmt.databox.models.Sku + :type sku: ~data_box_management_client.models.Sku + :param identity: Msi identity of the resource. + :type identity: ~data_box_management_client.models.ResourceIdentity """ _validation = { @@ -2119,6 +2448,7 @@ class Resource(msrest.serialization.Model): 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'sku': {'key': 'sku', 'type': 'Sku'}, + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, } def __init__( @@ -2127,12 +2457,14 @@ def __init__( location: str, sku: "Sku", tags: Optional[Dict[str, str]] = None, + identity: Optional["ResourceIdentity"] = None, **kwargs ): super(Resource, self).__init__(**kwargs) self.location = location self.tags = tags self.sku = sku + self.identity = identity class JobResource(Resource): @@ -2151,38 +2483,47 @@ class JobResource(Resource): can be used in viewing and grouping this resource (across resource groups). :type tags: dict[str, str] :param sku: Required. The sku type. - :type sku: ~azure.mgmt.databox.models.Sku + :type sku: ~data_box_management_client.models.Sku + :param identity: Msi identity of the resource. + :type identity: ~data_box_management_client.models.ResourceIdentity :ivar name: Name of the object. :vartype name: str :ivar id: Id of the object. :vartype id: str :ivar type: Type of the object. :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~data_box_management_client.models.SystemData + :param transfer_type: Required. Type of the data transfer. Possible values include: + "ImportToAzure", "ExportFromAzure". + :type transfer_type: str or ~data_box_management_client.models.TransferType :ivar is_cancellable: Describes whether the job is cancellable or not. :vartype is_cancellable: bool :ivar is_deletable: Describes whether the job is deletable or not. :vartype is_deletable: bool :ivar is_shipping_address_editable: Describes whether the shipping address is editable or not. :vartype is_shipping_address_editable: bool + :ivar is_prepare_to_ship_enabled: Is Prepare To Ship Enabled on this job. + :vartype is_prepare_to_ship_enabled: bool :ivar status: Name of the stage which is in progress. 
Possible values include: "DeviceOrdered", "DevicePrepared", "Dispatched", "Delivered", "PickedUp", "AtAzureDC", "DataCopy", "Completed", "CompletedWithErrors", "Cancelled", "Failed_IssueReportedAtCustomer", "Failed_IssueDetectedAtAzureDC", "Aborted", "CompletedWithWarnings", "ReadyToDispatchFromAzureDC", "ReadyToReceiveAtAzureDC". - :vartype status: str or ~azure.mgmt.databox.models.StageName + :vartype status: str or ~data_box_management_client.models.StageName :ivar start_time: Time at which the job was started in UTC ISO 8601 format. :vartype start_time: ~datetime.datetime :ivar error: Top level error for the job. - :vartype error: ~azure.mgmt.databox.models.Error + :vartype error: ~data_box_management_client.models.CloudError :param details: Details of a job run. This field will only be sent for expand details filter. - :type details: ~azure.mgmt.databox.models.JobDetails + :type details: ~data_box_management_client.models.JobDetails :ivar cancellation_reason: Reason for cancellation. :vartype cancellation_reason: str :param delivery_type: Delivery type of Job. Possible values include: "NonScheduled", - "Scheduled". - :type delivery_type: str or ~azure.mgmt.databox.models.JobDeliveryType + "Scheduled". Default value: "NonScheduled". + :type delivery_type: str or ~data_box_management_client.models.JobDeliveryType :param delivery_info: Delivery Info of Job. - :type delivery_info: ~azure.mgmt.databox.models.JobDeliveryInfo + :type delivery_info: ~data_box_management_client.models.JobDeliveryInfo :ivar is_cancellable_without_fee: Flag to indicate cancellation of scheduled job. :vartype is_cancellable_without_fee: bool """ @@ -2193,9 +2534,12 @@ class JobResource(Resource): 'name': {'readonly': True}, 'id': {'readonly': True}, 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'transfer_type': {'required': True}, 'is_cancellable': {'readonly': True}, 'is_deletable': {'readonly': True}, 'is_shipping_address_editable': {'readonly': True}, + 'is_prepare_to_ship_enabled': {'readonly': True}, 'status': {'readonly': True}, 'start_time': {'readonly': True}, 'error': {'readonly': True}, @@ -2207,15 +2551,19 @@ class JobResource(Resource): 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'sku': {'key': 'sku', 'type': 'Sku'}, + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, 'name': {'key': 'name', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'transfer_type': {'key': 'properties.transferType', 'type': 'str'}, 'is_cancellable': {'key': 'properties.isCancellable', 'type': 'bool'}, 'is_deletable': {'key': 'properties.isDeletable', 'type': 'bool'}, 'is_shipping_address_editable': {'key': 'properties.isShippingAddressEditable', 'type': 'bool'}, + 'is_prepare_to_ship_enabled': {'key': 'properties.isPrepareToShipEnabled', 'type': 'bool'}, 'status': {'key': 'properties.status', 'type': 'str'}, 'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'}, - 'error': {'key': 'properties.error', 'type': 'Error'}, + 'error': {'key': 'properties.error', 'type': 'CloudError'}, 'details': {'key': 'properties.details', 'type': 'JobDetails'}, 'cancellation_reason': {'key': 'properties.cancellationReason', 'type': 'str'}, 'delivery_type': {'key': 'properties.deliveryType', 'type': 'str'}, @@ -2228,19 +2576,24 @@ def __init__( *, location: str, sku: "Sku", + transfer_type: Union[str, "TransferType"], tags: Optional[Dict[str, str]] = 
None, + identity: Optional["ResourceIdentity"] = None, details: Optional["JobDetails"] = None, - delivery_type: Optional[Union[str, "JobDeliveryType"]] = None, + delivery_type: Optional[Union[str, "JobDeliveryType"]] = "NonScheduled", delivery_info: Optional["JobDeliveryInfo"] = None, **kwargs ): - super(JobResource, self).__init__(location=location, tags=tags, sku=sku, **kwargs) + super(JobResource, self).__init__(location=location, tags=tags, sku=sku, identity=identity, **kwargs) self.name = None self.id = None self.type = None + self.system_data = None + self.transfer_type = transfer_type self.is_cancellable = None self.is_deletable = None self.is_shipping_address_editable = None + self.is_prepare_to_ship_enabled = None self.status = None self.start_time = None self.error = None @@ -2255,7 +2608,7 @@ class JobResourceList(msrest.serialization.Model): """Job Resource Collection. :param value: List of job resources. - :type value: list[~azure.mgmt.databox.models.JobResource] + :type value: list[~data_box_management_client.models.JobResource] :param next_link: Link for the next set of job resources. :type next_link: str """ @@ -2283,30 +2636,30 @@ class JobResourceUpdateParameter(msrest.serialization.Model): :param tags: A set of tags. The list of key value pairs that describe the resource. These tags can be used in viewing and grouping this resource (across resource groups). :type tags: dict[str, str] + :param identity: Msi identity of the resource. + :type identity: ~data_box_management_client.models.ResourceIdentity :param details: Details of a job to be updated. - :type details: ~azure.mgmt.databox.models.UpdateJobDetails - :param destination_account_details: Destination account details. - :type destination_account_details: list[~azure.mgmt.databox.models.DestinationAccountDetails] + :type details: ~data_box_management_client.models.UpdateJobDetails """ _attribute_map = { 'tags': {'key': 'tags', 'type': '{str}'}, + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, 'details': {'key': 'properties.details', 'type': 'UpdateJobDetails'}, - 'destination_account_details': {'key': 'properties.destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, } def __init__( self, *, tags: Optional[Dict[str, str]] = None, + identity: Optional["ResourceIdentity"] = None, details: Optional["UpdateJobDetails"] = None, - destination_account_details: Optional[List["DestinationAccountDetails"]] = None, **kwargs ): super(JobResourceUpdateParameter, self).__init__(**kwargs) self.tags = tags + self.identity = identity self.details = details - self.destination_account_details = destination_account_details class JobStages(msrest.serialization.Model): @@ -2319,18 +2672,17 @@ class JobStages(msrest.serialization.Model): "CompletedWithErrors", "Cancelled", "Failed_IssueReportedAtCustomer", "Failed_IssueDetectedAtAzureDC", "Aborted", "CompletedWithWarnings", "ReadyToDispatchFromAzureDC", "ReadyToReceiveAtAzureDC". - :vartype stage_name: str or ~azure.mgmt.databox.models.StageName + :vartype stage_name: str or ~data_box_management_client.models.StageName :ivar display_name: Display name of the job stage. :vartype display_name: str :ivar stage_status: Status of the job stage. Possible values include: "None", "InProgress", - "Succeeded", "Failed", "Cancelled", "Cancelling", "SucceededWithErrors". - :vartype stage_status: str or ~azure.mgmt.databox.models.StageStatus + "Succeeded", "Failed", "Cancelled", "Cancelling", "SucceededWithErrors", + "WaitingForCustomerAction", "SucceededWithWarnings". 
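JobResource now requires transfer_type alongside location and sku, optionally accepts a managed identity, and defaults delivery_type to "NonScheduled". A minimal construction sketch under the same assumed import path:

    from azext_databox.vendored_sdks.databox.models import JobResource, Sku  # path assumed

    job = JobResource(
        location="westus",
        sku=Sku(name="DataBox"),
        transfer_type="ImportToAzure",  # newly required property
    )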
+ :vartype stage_status: str or ~data_box_management_client.models.StageStatus :ivar stage_time: Time for the job stage in UTC ISO 8601 format. :vartype stage_time: ~datetime.datetime :ivar job_stage_details: Job Stage Details. :vartype job_stage_details: object - :ivar error_details: Error details for the stage. - :vartype error_details: list[~azure.mgmt.databox.models.JobErrorDetails] """ _validation = { @@ -2339,7 +2691,6 @@ class JobStages(msrest.serialization.Model): 'stage_status': {'readonly': True}, 'stage_time': {'readonly': True}, 'job_stage_details': {'readonly': True}, - 'error_details': {'readonly': True}, } _attribute_map = { @@ -2348,7 +2699,6 @@ class JobStages(msrest.serialization.Model): 'stage_status': {'key': 'stageStatus', 'type': 'str'}, 'stage_time': {'key': 'stageTime', 'type': 'iso-8601'}, 'job_stage_details': {'key': 'jobStageDetails', 'type': 'object'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, } def __init__( @@ -2361,7 +2711,161 @@ def __init__( self.stage_status = None self.stage_time = None self.job_stage_details = None - self.error_details = None + + +class KeyEncryptionKey(msrest.serialization.Model): + """Encryption key containing details about key to encrypt different keys. + + All required parameters must be populated in order to send to Azure. + + :param kek_type: Required. Type of encryption key used for key encryption. Possible values + include: "MicrosoftManaged", "CustomerManaged". Default value: "MicrosoftManaged". + :type kek_type: str or ~data_box_management_client.models.KekType + :param identity_properties: Managed identity properties used for key encryption. + :type identity_properties: ~data_box_management_client.models.IdentityProperties + :param kek_url: Key encryption key. It is required in case of Customer managed KekType. + :type kek_url: str + :param kek_vault_resource_id: Kek vault resource id. It is required in case of Customer managed + KekType. + :type kek_vault_resource_id: str + """ + + _validation = { + 'kek_type': {'required': True}, + } + + _attribute_map = { + 'kek_type': {'key': 'kekType', 'type': 'str'}, + 'identity_properties': {'key': 'identityProperties', 'type': 'IdentityProperties'}, + 'kek_url': {'key': 'kekUrl', 'type': 'str'}, + 'kek_vault_resource_id': {'key': 'kekVaultResourceID', 'type': 'str'}, + } + + def __init__( + self, + *, + kek_type: Union[str, "KekType"] = "MicrosoftManaged", + identity_properties: Optional["IdentityProperties"] = None, + kek_url: Optional[str] = None, + kek_vault_resource_id: Optional[str] = None, + **kwargs + ): + super(KeyEncryptionKey, self).__init__(**kwargs) + self.kek_type = kek_type + self.identity_properties = identity_properties + self.kek_url = kek_url + self.kek_vault_resource_id = kek_vault_resource_id + + +class LastMitigationActionOnJob(msrest.serialization.Model): + """Last Mitigation Action Performed On Job. + + :param action_date_time_in_utc: Action performed date time. + :type action_date_time_in_utc: ~datetime.datetime + :param is_performed_by_customer: Action performed by customer, + possibility is that mitigation might happen by customer or service or by ops. + :type is_performed_by_customer: bool + :param customer_resolution: Resolution code provided by customer. Possible values include: + "None", "MoveToCleanUpDevice", "Resume". 
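KeyEncryptionKey is new and describes how job secrets are encrypted; kek_url and kek_vault_resource_id are only needed for customer-managed keys. A sketch under the same assumed import path, with placeholder identifiers:

    from azext_databox.vendored_sdks.databox.models import KeyEncryptionKey  # path assumed

    kek = KeyEncryptionKey(
        kek_type="CustomerManaged",
        kek_url="<key-identifier-url>",              # placeholder
        kek_vault_resource_id="<key-vault-arm-id>",  # placeholder
    )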
+ :type customer_resolution: str or ~data_box_management_client.models.CustomerResolutionCode + """ + + _attribute_map = { + 'action_date_time_in_utc': {'key': 'actionDateTimeInUtc', 'type': 'iso-8601'}, + 'is_performed_by_customer': {'key': 'isPerformedByCustomer', 'type': 'bool'}, + 'customer_resolution': {'key': 'customerResolution', 'type': 'str'}, + } + + def __init__( + self, + *, + action_date_time_in_utc: Optional[datetime.datetime] = None, + is_performed_by_customer: Optional[bool] = None, + customer_resolution: Optional[Union[str, "CustomerResolutionCode"]] = None, + **kwargs + ): + super(LastMitigationActionOnJob, self).__init__(**kwargs) + self.action_date_time_in_utc = action_date_time_in_utc + self.is_performed_by_customer = is_performed_by_customer + self.customer_resolution = customer_resolution + + +class ManagedDiskDetails(DataAccountDetails): + """Details of the managed disks. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Account Type of the data to be transferred.Constant filled + by server. Possible values include: "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param share_password: Password for all the shares to be created on the device. Should not be + passed for TransferType:ExportFromAzure jobs. If this is not passed, the service will generate + password itself. This will not be returned in Get Call. Password Requirements : Password must + be minimum of 12 and maximum of 64 characters. Password must have at least one uppercase + alphabet, one number and one special character. Password cannot have the following characters : + IilLoO0 Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. + :type share_password: str + :param resource_group_id: Required. Resource Group Id of the compute disks. + :type resource_group_id: str + :param staging_storage_account_id: Required. Resource Id of the storage account that can be + used to copy the vhd for staging. + :type staging_storage_account_id: str + """ + + _validation = { + 'data_account_type': {'required': True}, + 'resource_group_id': {'required': True}, + 'staging_storage_account_id': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'share_password': {'key': 'sharePassword', 'type': 'str'}, + 'resource_group_id': {'key': 'resourceGroupId', 'type': 'str'}, + 'staging_storage_account_id': {'key': 'stagingStorageAccountId', 'type': 'str'}, + } + + def __init__( + self, + *, + resource_group_id: str, + staging_storage_account_id: str, + share_password: Optional[str] = None, + **kwargs + ): + super(ManagedDiskDetails, self).__init__(share_password=share_password, **kwargs) + self.data_account_type = 'ManagedDisk' # type: str + self.resource_group_id = resource_group_id + self.staging_storage_account_id = staging_storage_account_id + + +class MitigateJobRequest(msrest.serialization.Model): + """The Mitigate Job captured from request body for Mitigate API. + + All required parameters must be populated in order to send to Azure. + + :param customer_resolution_code: Required. Resolution code for the job. Possible values + include: "None", "MoveToCleanUpDevice", "Resume". 
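ManagedDiskDetails replaces the old DestinationManagedDiskDetails: the target is now modelled as a data account, so account_id is gone and data_account_type is fixed to 'ManagedDisk'. A minimal sketch with placeholder ARM ids, under the same assumed import path:

    from azext_databox.vendored_sdks.databox.models import ManagedDiskDetails  # path assumed

    disk_target = ManagedDiskDetails(
        resource_group_id="<resource-group-arm-id>",            # placeholder
        staging_storage_account_id="<staging-storage-arm-id>",  # placeholder
    )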
+ :type customer_resolution_code: str or + ~data_box_management_client.models.CustomerResolutionCode + """ + + _validation = { + 'customer_resolution_code': {'required': True}, + } + + _attribute_map = { + 'customer_resolution_code': {'key': 'customerResolutionCode', 'type': 'str'}, + } + + def __init__( + self, + *, + customer_resolution_code: Union[str, "CustomerResolutionCode"], + **kwargs + ): + super(MitigateJobRequest, self).__init__(**kwargs) + self.customer_resolution_code = customer_resolution_code class NotificationPreference(msrest.serialization.Model): @@ -2371,7 +2875,7 @@ class NotificationPreference(msrest.serialization.Model): :param stage_name: Required. Name of the stage. Possible values include: "DevicePrepared", "Dispatched", "Delivered", "PickedUp", "AtAzureDC", "DataCopy". - :type stage_name: str or ~azure.mgmt.databox.models.NotificationStageName + :type stage_name: str or ~data_box_management_client.models.NotificationStageName :param send_notification: Required. Notification is required or not. :type send_notification: bool """ @@ -2390,7 +2894,7 @@ def __init__( self, *, stage_name: Union[str, "NotificationStageName"], - send_notification: bool, + send_notification: bool = True, **kwargs ): super(NotificationPreference, self).__init__(**kwargs) @@ -2407,11 +2911,13 @@ class Operation(msrest.serialization.Model): {resourceProviderNamespace}/{resourceType}/{read|write|delete|action}. :vartype name: str :ivar display: Operation display values. - :vartype display: ~azure.mgmt.databox.models.OperationDisplay + :vartype display: ~data_box_management_client.models.OperationDisplay :ivar properties: Operation properties. :vartype properties: object :ivar origin: Origin of the operation. Can be : user|system|user,system. :vartype origin: str + :param is_data_action: Indicates whether the operation is a data action. + :type is_data_action: bool """ _validation = { @@ -2426,10 +2932,13 @@ class Operation(msrest.serialization.Model): 'display': {'key': 'display', 'type': 'OperationDisplay'}, 'properties': {'key': 'properties', 'type': 'object'}, 'origin': {'key': 'origin', 'type': 'str'}, + 'is_data_action': {'key': 'isDataAction', 'type': 'bool'}, } def __init__( self, + *, + is_data_action: Optional[bool] = None, **kwargs ): super(Operation, self).__init__(**kwargs) @@ -2437,6 +2946,7 @@ def __init__( self.display = None self.properties = None self.origin = None + self.is_data_action = is_data_action class OperationDisplay(msrest.serialization.Model): @@ -2481,7 +2991,7 @@ class OperationList(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. :ivar value: List of operations. - :vartype value: list[~azure.mgmt.databox.models.Operation] + :vartype value: list[~data_box_management_client.models.Operation] :param next_link: Link for the next set of operations. :type next_link: str """ @@ -2544,15 +3054,18 @@ def __init__( class Preferences(msrest.serialization.Model): """Preferences related to the order. - :param preferred_data_center_region: Preferred Data Center Region. + :param preferred_data_center_region: Preferred data center region. :type preferred_data_center_region: list[str] :param transport_preferences: Preferences related to the shipment logistics of the sku. - :type transport_preferences: ~azure.mgmt.databox.models.TransportPreferences + :type transport_preferences: ~data_box_management_client.models.TransportPreferences + :param encryption_preferences: Preferences related to the Encryption. 
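MitigateJobRequest is the request body for the new Mitigate API; only customer_resolution_code is required. For example, under the same assumed import path:

    from azext_databox.vendored_sdks.databox.models import MitigateJobRequest  # path assumed

    request = MitigateJobRequest(customer_resolution_code="MoveToCleanUpDevice")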
+ :type encryption_preferences: ~data_box_management_client.models.EncryptionPreferences """ _attribute_map = { 'preferred_data_center_region': {'key': 'preferredDataCenterRegion', 'type': '[str]'}, 'transport_preferences': {'key': 'transportPreferences', 'type': 'TransportPreferences'}, + 'encryption_preferences': {'key': 'encryptionPreferences', 'type': 'EncryptionPreferences'}, } def __init__( @@ -2560,11 +3073,13 @@ def __init__( *, preferred_data_center_region: Optional[List[str]] = None, transport_preferences: Optional["TransportPreferences"] = None, + encryption_preferences: Optional["EncryptionPreferences"] = None, **kwargs ): super(Preferences, self).__init__(**kwargs) self.preferred_data_center_region = preferred_data_center_region self.transport_preferences = transport_preferences + self.encryption_preferences = encryption_preferences class PreferencesValidationRequest(ValidationInputRequest): @@ -2573,15 +3088,15 @@ class PreferencesValidationRequest(ValidationInputRequest): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator - :param preference: Preference requested with respect to transport type and data center. - :type preference: ~azure.mgmt.databox.models.Preferences + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + :param preference: Preference of transport and data center. + :type preference: ~data_box_management_client.models.Preferences :param device_type: Required. Device type to be used for the job. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type device_type: str or ~azure.mgmt.databox.models.SkuName + :type device_type: str or ~data_box_management_client.models.SkuName """ _validation = { @@ -2616,15 +3131,15 @@ class PreferencesValidationResponseProperties(ValidationInputResponse): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar status: Validation status of requested data center and transport. Possible values include: "Valid", "Invalid", "Skipped". 
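Preferences gains an encryption_preferences field, wiring in the EncryptionPreferences model added earlier in this file. A sketch requesting software-based double encryption, under the same assumed import path:

    from azext_databox.vendored_sdks.databox.models import Preferences, EncryptionPreferences  # path assumed

    prefs = Preferences(
        preferred_data_center_region=["westus"],
        encryption_preferences=EncryptionPreferences(double_encryption="Enabled"),
    )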
- :vartype status: str or ~azure.mgmt.databox.models.ValidationStatus + :vartype status: str or ~data_box_management_client.models.ValidationStatus """ _validation = { @@ -2635,7 +3150,7 @@ class PreferencesValidationResponseProperties(ValidationInputResponse): _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'status': {'key': 'status', 'type': 'str'}, } @@ -2653,10 +3168,12 @@ class RegionConfigurationRequest(msrest.serialization.Model): :param schedule_availability_request: Request body to get the availability for scheduling orders. - :type schedule_availability_request: ~azure.mgmt.databox.models.ScheduleAvailabilityRequest + :type schedule_availability_request: + ~data_box_management_client.models.ScheduleAvailabilityRequest :param transport_availability_request: Request body to get the transport availability for given sku. - :type transport_availability_request: ~azure.mgmt.databox.models.TransportAvailabilityRequest + :type transport_availability_request: + ~data_box_management_client.models.TransportAvailabilityRequest """ _attribute_map = { @@ -2683,10 +3200,10 @@ class RegionConfigurationResponse(msrest.serialization.Model): :ivar schedule_availability_response: Schedule availability for given sku in a region. :vartype schedule_availability_response: - ~azure.mgmt.databox.models.ScheduleAvailabilityResponse + ~data_box_management_client.models.ScheduleAvailabilityResponse :ivar transport_availability_response: Transport options available for given sku in a region. :vartype transport_availability_response: - ~azure.mgmt.databox.models.TransportAvailabilityResponse + ~data_box_management_client.models.TransportAvailabilityResponse """ _validation = { @@ -2708,8 +3225,50 @@ def __init__( self.transport_availability_response = None +class ResourceIdentity(msrest.serialization.Model): + """Msi identity details of the resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param type: Identity type. + :type type: str + :ivar principal_id: Service Principal Id backing the Msi. + :vartype principal_id: str + :ivar tenant_id: Home Tenant Id. + :vartype tenant_id: str + :param user_assigned_identities: User Assigned Identities. + :type user_assigned_identities: dict[str, + ~data_box_management_client.models.UserAssignedIdentity] + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'}, + } + + def __init__( + self, + *, + type: Optional[str] = "None", + user_assigned_identities: Optional[Dict[str, "UserAssignedIdentity"]] = None, + **kwargs + ): + super(ResourceIdentity, self).__init__(**kwargs) + self.type = type + self.principal_id = None + self.tenant_id = None + self.user_assigned_identities = user_assigned_identities + + class ScheduleAvailabilityResponse(msrest.serialization.Model): - """Schedule availability response for given sku in a region. + """Schedule availability for given sku in a region. Variables are only populated by the server, and will be ignored when sending a request. 
@@ -2742,13 +3301,14 @@ class ShareCredentialDetails(msrest.serialization.Model): :vartype share_name: str :ivar share_type: Type of the share. Possible values include: "UnknownType", "HCS", "BlockBlob", "PageBlob", "AzureFile", "ManagedDisk". - :vartype share_type: str or ~azure.mgmt.databox.models.ShareDestinationFormatType + :vartype share_type: str or ~data_box_management_client.models.ShareDestinationFormatType :ivar user_name: User name for the share. :vartype user_name: str :ivar password: Password for the share. :vartype password: str :ivar supported_access_protocols: Access protocols supported on the device. - :vartype supported_access_protocols: list[str or ~azure.mgmt.databox.models.AccessProtocol] + :vartype supported_access_protocols: list[str or + ~data_box_management_client.models.AccessProtocol] """ _validation = { @@ -2868,21 +3428,20 @@ class ShippingAddress(msrest.serialization.Model): :type state_or_province: str :param country: Required. Name of the Country. :type country: str - :param postal_code: Required. Postal code. + :param postal_code: Postal code. :type postal_code: str :param zip_extended_code: Extended Zip Code. :type zip_extended_code: str :param company_name: Name of the company. :type company_name: str :param address_type: Type of address. Possible values include: "None", "Residential", - "Commercial". - :type address_type: str or ~azure.mgmt.databox.models.AddressType + "Commercial". Default value: "None". + :type address_type: str or ~data_box_management_client.models.AddressType """ _validation = { 'street_address1': {'required': True}, 'country': {'required': True}, - 'postal_code': {'required': True}, } _attribute_map = { @@ -2903,14 +3462,14 @@ def __init__( *, street_address1: str, country: str, - postal_code: str, street_address2: Optional[str] = None, street_address3: Optional[str] = None, city: Optional[str] = None, state_or_province: Optional[str] = None, + postal_code: Optional[str] = None, zip_extended_code: Optional[str] = None, company_name: Optional[str] = None, - address_type: Optional[Union[str, "AddressType"]] = None, + address_type: Optional[Union[str, "AddressType"]] = "None", **kwargs ): super(ShippingAddress, self).__init__(**kwargs) @@ -2933,7 +3492,7 @@ class Sku(msrest.serialization.Model): :param name: Required. The sku name. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type name: str or ~azure.mgmt.databox.models.SkuName + :type name: str or ~data_box_management_client.models.SkuName :param display_name: The display name of the sku. :type display_name: str :param family: The sku family. @@ -2967,20 +3526,19 @@ def __init__( class SkuAvailabilityValidationRequest(ValidationInputRequest): """Request to validate sku availability. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". 
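ShippingAddress relaxes postal_code from required to optional and defaults address_type to "None"; street_address1 and country remain the only required fields. A minimal sketch under the same assumed import path:

    from azext_databox.vendored_sdks.databox.models import ShippingAddress  # path assumed

    address = ShippingAddress(
        street_address1="16 TOWNSEND ST",
        city="San Francisco",
        state_or_province="CA",
        country="US",
        postal_code="94107",  # optional in this version
    )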
+ :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :param device_type: Required. Device type to be used for the job. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type device_type: str or ~azure.mgmt.databox.models.SkuName - :ivar transfer_type: Required. Type of the transfer. Default value: "ImportToAzure". - :vartype transfer_type: str + :type device_type: str or ~data_box_management_client.models.SkuName + :param transfer_type: Required. Type of the transfer. Possible values include: "ImportToAzure", + "ExportFromAzure". + :type transfer_type: str or ~data_box_management_client.models.TransferType :param country: Required. ISO country code. Country for hardware shipment. For codes check: https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements. :type country: str @@ -2992,7 +3550,7 @@ class SkuAvailabilityValidationRequest(ValidationInputRequest): _validation = { 'validation_type': {'required': True}, 'device_type': {'required': True}, - 'transfer_type': {'required': True, 'constant': True}, + 'transfer_type': {'required': True}, 'country': {'required': True}, 'location': {'required': True}, } @@ -3005,12 +3563,11 @@ class SkuAvailabilityValidationRequest(ValidationInputRequest): 'location': {'key': 'location', 'type': 'str'}, } - transfer_type = "ImportToAzure" - def __init__( self, *, device_type: Union[str, "SkuName"], + transfer_type: Union[str, "TransferType"], country: str, location: str, **kwargs @@ -3018,6 +3575,7 @@ def __init__( super(SkuAvailabilityValidationRequest, self).__init__(**kwargs) self.validation_type = 'ValidateSkuAvailability' # type: str self.device_type = device_type + self.transfer_type = transfer_type self.country = country self.location = location @@ -3030,15 +3588,15 @@ class SkuAvailabilityValidationResponseProperties(ValidationInputResponse): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar status: Sku availability validation status. Possible values include: "Valid", "Invalid", "Skipped". - :vartype status: str or ~azure.mgmt.databox.models.ValidationStatus + :vartype status: str or ~data_box_management_client.models.ValidationStatus """ _validation = { @@ -3049,7 +3607,7 @@ class SkuAvailabilityValidationResponseProperties(ValidationInputResponse): _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'status': {'key': 'status', 'type': 'str'}, } @@ -3101,16 +3659,22 @@ class SkuCost(msrest.serialization.Model): :vartype meter_id: str :ivar meter_type: The type of the meter. 
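SkuAvailabilityValidationRequest no longer pins transfer_type to the constant "ImportToAzure"; it is now a required constructor argument, so export availability can be validated as well. For example, under the same assumed import path:

    from azext_databox.vendored_sdks.databox.models import SkuAvailabilityValidationRequest  # path assumed

    request = SkuAvailabilityValidationRequest(
        device_type="DataBoxDisk",
        transfer_type="ExportFromAzure",
        country="US",
        location="westus",
    )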
:vartype meter_type: str + :ivar multiplier: Multiplier specifies the region specific value to be multiplied with 1$ guid. + Eg: Our new regions will be using 1$ shipping guid with appropriate multiplier specific to + region. + :vartype multiplier: float """ _validation = { 'meter_id': {'readonly': True}, 'meter_type': {'readonly': True}, + 'multiplier': {'readonly': True}, } _attribute_map = { 'meter_id': {'key': 'meterId', 'type': 'str'}, 'meter_type': {'key': 'meterType', 'type': 'str'}, + 'multiplier': {'key': 'multiplier', 'type': 'float'}, } def __init__( @@ -3120,6 +3684,7 @@ def __init__( super(SkuCost, self).__init__(**kwargs) self.meter_id = None self.meter_type = None + self.multiplier = None class SkuInformation(msrest.serialization.Model): @@ -3128,21 +3693,21 @@ class SkuInformation(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. :ivar sku: The Sku. - :vartype sku: ~azure.mgmt.databox.models.Sku + :vartype sku: ~data_box_management_client.models.Sku :ivar enabled: The sku is enabled or not. :vartype enabled: bool - :ivar destination_to_service_location_map: The map of destination location to service location. - :vartype destination_to_service_location_map: - list[~azure.mgmt.databox.models.DestinationToServiceLocationMap] + :ivar data_location_to_service_location_map: The map of data location to service location. + :vartype data_location_to_service_location_map: + list[~data_box_management_client.models.DataLocationToServiceLocationMap] :ivar capacity: Capacity of the Sku. - :vartype capacity: ~azure.mgmt.databox.models.SkuCapacity + :vartype capacity: ~data_box_management_client.models.SkuCapacity :ivar costs: Cost of the Sku. - :vartype costs: list[~azure.mgmt.databox.models.SkuCost] + :vartype costs: list[~data_box_management_client.models.SkuCost] :ivar api_versions: Api versions that support this Sku. :vartype api_versions: list[str] :ivar disabled_reason: Reason why the Sku is disabled. Possible values include: "None", "Country", "Region", "Feature", "OfferType", "NoSubscriptionInfo". - :vartype disabled_reason: str or ~azure.mgmt.databox.models.SkuDisabledReason + :vartype disabled_reason: str or ~data_box_management_client.models.SkuDisabledReason :ivar disabled_reason_message: Message for why the Sku is disabled. :vartype disabled_reason_message: str :ivar required_feature: Required feature to access the sku. 
@@ -3152,7 +3717,7 @@ class SkuInformation(msrest.serialization.Model): _validation = { 'sku': {'readonly': True}, 'enabled': {'readonly': True}, - 'destination_to_service_location_map': {'readonly': True}, + 'data_location_to_service_location_map': {'readonly': True}, 'capacity': {'readonly': True}, 'costs': {'readonly': True}, 'api_versions': {'readonly': True}, @@ -3164,7 +3729,7 @@ class SkuInformation(msrest.serialization.Model): _attribute_map = { 'sku': {'key': 'sku', 'type': 'Sku'}, 'enabled': {'key': 'enabled', 'type': 'bool'}, - 'destination_to_service_location_map': {'key': 'properties.destinationToServiceLocationMap', 'type': '[DestinationToServiceLocationMap]'}, + 'data_location_to_service_location_map': {'key': 'properties.dataLocationToServiceLocationMap', 'type': '[DataLocationToServiceLocationMap]'}, 'capacity': {'key': 'properties.capacity', 'type': 'SkuCapacity'}, 'costs': {'key': 'properties.costs', 'type': '[SkuCost]'}, 'api_versions': {'key': 'properties.apiVersions', 'type': '[str]'}, @@ -3180,7 +3745,7 @@ def __init__( super(SkuInformation, self).__init__(**kwargs) self.sku = None self.enabled = None - self.destination_to_service_location_map = None + self.data_location_to_service_location_map = None self.capacity = None self.costs = None self.api_versions = None @@ -3189,16 +3754,58 @@ def __init__( self.required_feature = None +class StorageAccountDetails(DataAccountDetails): + """Details for the storage account. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Account Type of the data to be transferred.Constant filled + by server. Possible values include: "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param share_password: Password for all the shares to be created on the device. Should not be + passed for TransferType:ExportFromAzure jobs. If this is not passed, the service will generate + password itself. This will not be returned in Get Call. Password Requirements : Password must + be minimum of 12 and maximum of 64 characters. Password must have at least one uppercase + alphabet, one number and one special character. Password cannot have the following characters : + IilLoO0 Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. + :type share_password: str + :param storage_account_id: Required. Storage Account Resource Id. + :type storage_account_id: str + """ + + _validation = { + 'data_account_type': {'required': True}, + 'storage_account_id': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'share_password': {'key': 'sharePassword', 'type': 'str'}, + 'storage_account_id': {'key': 'storageAccountId', 'type': 'str'}, + } + + def __init__( + self, + *, + storage_account_id: str, + share_password: Optional[str] = None, + **kwargs + ): + super(StorageAccountDetails, self).__init__(share_password=share_password, **kwargs) + self.data_account_type = 'StorageAccount' # type: str + self.storage_account_id = storage_account_id + + class SubscriptionIsAllowedToCreateJobValidationRequest(ValidationInputRequest): """Request to validate subscription permission to create jobs. All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. 
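StorageAccountDetails above is one of the new DataAccountDetails subtypes; its discriminator is fixed to "StorageAccount" and only the storage account resource id is required. A small construction sketch under the same assumed import path, with a placeholder resource id:

    from azext_databox.vendored_sdks.databox.models import StorageAccountDetails

    account = StorageAccountDetails(
        storage_account_id=(
            "/subscriptions/<sub-id>/resourceGroups/<rg>"
            "/providers/Microsoft.Storage/storageAccounts/<account-name>"
        ),
        # share_password omitted: per the docstring above, the service generates one,
        # and it must not be set for TransferType:ExportFromAzure jobs.
    )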
Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator """ _validation = { @@ -3225,15 +3832,15 @@ class SubscriptionIsAllowedToCreateJobValidationResponseProperties(ValidationInp All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar status: Validation status of subscription permission to create job. Possible values include: "Valid", "Invalid", "Skipped". - :vartype status: str or ~azure.mgmt.databox.models.ValidationStatus + :vartype status: str or ~data_box_management_client.models.ValidationStatus """ _validation = { @@ -3244,7 +3851,7 @@ class SubscriptionIsAllowedToCreateJobValidationResponseProperties(ValidationInp _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'status': {'key': 'status', 'type': 'str'}, } @@ -3257,6 +3864,224 @@ def __init__( self.status = None +class SystemData(msrest.serialization.Model): + """Provides details about resource creation and update time. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar created_by: A string identifier for the identity that created the resource. + :vartype created_by: str + :ivar created_by_type: The type of identity that created the resource: user, application, + managedIdentity. + :vartype created_by_type: str + :ivar created_at: The timestamp of resource creation (UTC). + :vartype created_at: ~datetime.datetime + :ivar last_modified_by: A string identifier for the identity that last modified the resource. + :vartype last_modified_by: str + :ivar last_modified_by_type: The type of identity that last modified the resource: user, + application, managedIdentity. + :vartype last_modified_by_type: str + :ivar last_modified_at: The timestamp of resource last modification (UTC). 
+ :vartype last_modified_at: ~datetime.datetime + """ + + _validation = { + 'created_by': {'readonly': True}, + 'created_by_type': {'readonly': True}, + 'created_at': {'readonly': True}, + 'last_modified_by': {'readonly': True}, + 'last_modified_by_type': {'readonly': True}, + 'last_modified_at': {'readonly': True}, + } + + _attribute_map = { + 'created_by': {'key': 'createdBy', 'type': 'str'}, + 'created_by_type': {'key': 'createdByType', 'type': 'str'}, + 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, + 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, + 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, + 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, + } + + def __init__( + self, + **kwargs + ): + super(SystemData, self).__init__(**kwargs) + self.created_by = None + self.created_by_type = None + self.created_at = None + self.last_modified_by = None + self.last_modified_by_type = None + self.last_modified_at = None + + +class TransferAllDetails(msrest.serialization.Model): + """Details to transfer all data. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Type of the account of data. Possible values include: + "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param transfer_all_blobs: To indicate if all Azure blobs have to be transferred. + :type transfer_all_blobs: bool + :param transfer_all_files: To indicate if all Azure Files have to be transferred. + :type transfer_all_files: bool + """ + + _validation = { + 'data_account_type': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'transfer_all_blobs': {'key': 'transferAllBlobs', 'type': 'bool'}, + 'transfer_all_files': {'key': 'transferAllFiles', 'type': 'bool'}, + } + + def __init__( + self, + *, + data_account_type: Union[str, "DataAccountType"], + transfer_all_blobs: Optional[bool] = None, + transfer_all_files: Optional[bool] = None, + **kwargs + ): + super(TransferAllDetails, self).__init__(**kwargs) + self.data_account_type = data_account_type + self.transfer_all_blobs = transfer_all_blobs + self.transfer_all_files = transfer_all_files + + +class TransferConfiguration(msrest.serialization.Model): + """Configuration for defining the transfer of data. + + All required parameters must be populated in order to send to Azure. + + :param transfer_configuration_type: Required. Type of the configuration for transfer. Possible + values include: "TransferAll", "TransferUsingFilter". + :type transfer_configuration_type: str or + ~data_box_management_client.models.TransferConfigurationType + :param transfer_filter_details: Map of filter type and the details to filter. This field is + required only if the TransferConfigurationType is given as TransferUsingFilter. + :type transfer_filter_details: + ~data_box_management_client.models.TransferConfigurationTransferFilterDetails + :param transfer_all_details: Map of filter type and the details to transfer all data. This + field is required only if the TransferConfigurationType is given as TransferAll. 
+ :type transfer_all_details: + ~data_box_management_client.models.TransferConfigurationTransferAllDetails + """ + + _validation = { + 'transfer_configuration_type': {'required': True}, + } + + _attribute_map = { + 'transfer_configuration_type': {'key': 'transferConfigurationType', 'type': 'str'}, + 'transfer_filter_details': {'key': 'transferFilterDetails', 'type': 'TransferConfigurationTransferFilterDetails'}, + 'transfer_all_details': {'key': 'transferAllDetails', 'type': 'TransferConfigurationTransferAllDetails'}, + } + + def __init__( + self, + *, + transfer_configuration_type: Union[str, "TransferConfigurationType"], + transfer_filter_details: Optional["TransferConfigurationTransferFilterDetails"] = None, + transfer_all_details: Optional["TransferConfigurationTransferAllDetails"] = None, + **kwargs + ): + super(TransferConfiguration, self).__init__(**kwargs) + self.transfer_configuration_type = transfer_configuration_type + self.transfer_filter_details = transfer_filter_details + self.transfer_all_details = transfer_all_details + + +class TransferConfigurationTransferAllDetails(msrest.serialization.Model): + """Map of filter type and the details to transfer all data. This field is required only if the TransferConfigurationType is given as TransferAll. + + :param include: Details to transfer all data. + :type include: ~data_box_management_client.models.TransferAllDetails + """ + + _attribute_map = { + 'include': {'key': 'include', 'type': 'TransferAllDetails'}, + } + + def __init__( + self, + *, + include: Optional["TransferAllDetails"] = None, + **kwargs + ): + super(TransferConfigurationTransferAllDetails, self).__init__(**kwargs) + self.include = include + + +class TransferConfigurationTransferFilterDetails(msrest.serialization.Model): + """Map of filter type and the details to filter. This field is required only if the TransferConfigurationType is given as TransferUsingFilter. + + :param include: Details of the filtering the transfer of data. + :type include: ~data_box_management_client.models.TransferFilterDetails + """ + + _attribute_map = { + 'include': {'key': 'include', 'type': 'TransferFilterDetails'}, + } + + def __init__( + self, + *, + include: Optional["TransferFilterDetails"] = None, + **kwargs + ): + super(TransferConfigurationTransferFilterDetails, self).__init__(**kwargs) + self.include = include + + +class TransferFilterDetails(msrest.serialization.Model): + """Details of the filtering the transfer of data. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Type of the account of data. Possible values include: + "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param blob_filter_details: Filter details to transfer blobs. + :type blob_filter_details: ~data_box_management_client.models.BlobFilterDetails + :param azure_file_filter_details: Filter details to transfer Azure files. + :type azure_file_filter_details: ~data_box_management_client.models.AzureFileFilterDetails + :param filter_file_details: Details of the filter files to be used for data transfer. 
+ :type filter_file_details: list[~data_box_management_client.models.FilterFileDetails] + """ + + _validation = { + 'data_account_type': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'blob_filter_details': {'key': 'blobFilterDetails', 'type': 'BlobFilterDetails'}, + 'azure_file_filter_details': {'key': 'azureFileFilterDetails', 'type': 'AzureFileFilterDetails'}, + 'filter_file_details': {'key': 'filterFileDetails', 'type': '[FilterFileDetails]'}, + } + + def __init__( + self, + *, + data_account_type: Union[str, "DataAccountType"], + blob_filter_details: Optional["BlobFilterDetails"] = None, + azure_file_filter_details: Optional["AzureFileFilterDetails"] = None, + filter_file_details: Optional[List["FilterFileDetails"]] = None, + **kwargs + ): + super(TransferFilterDetails, self).__init__(**kwargs) + self.data_account_type = data_account_type + self.blob_filter_details = blob_filter_details + self.azure_file_filter_details = azure_file_filter_details + self.filter_file_details = filter_file_details + + class TransportAvailabilityDetails(msrest.serialization.Model): """Transport options availability details for given region. @@ -3264,7 +4089,7 @@ class TransportAvailabilityDetails(msrest.serialization.Model): :ivar shipment_type: Transport Shipment Type supported for given region. Possible values include: "CustomerManaged", "MicrosoftManaged". - :vartype shipment_type: str or ~azure.mgmt.databox.models.TransportShipmentTypes + :vartype shipment_type: str or ~data_box_management_client.models.TransportShipmentTypes """ _validation = { @@ -3288,7 +4113,7 @@ class TransportAvailabilityRequest(msrest.serialization.Model): :param sku_name: Type of the device. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type sku_name: str or ~azure.mgmt.databox.models.SkuName + :type sku_name: str or ~data_box_management_client.models.SkuName """ _attribute_map = { @@ -3312,7 +4137,7 @@ class TransportAvailabilityResponse(msrest.serialization.Model): :ivar transport_availability_details: List of transport availability details for given region. :vartype transport_availability_details: - list[~azure.mgmt.databox.models.TransportAvailabilityDetails] + list[~data_box_management_client.models.TransportAvailabilityDetails] """ _validation = { @@ -3338,7 +4163,7 @@ class TransportPreferences(msrest.serialization.Model): :param preferred_shipment_type: Required. Indicates Shipment Logistics type that the customer preferred. Possible values include: "CustomerManaged", "MicrosoftManaged". - :type preferred_shipment_type: str or ~azure.mgmt.databox.models.TransportShipmentTypes + :type preferred_shipment_type: str or ~data_box_management_client.models.TransportShipmentTypes """ _validation = { @@ -3367,7 +4192,7 @@ class UnencryptedCredentials(msrest.serialization.Model): :ivar job_name: Name of the job. :vartype job_name: str :ivar job_secrets: Secrets related to this job. - :vartype job_secrets: ~azure.mgmt.databox.models.JobSecrets + :vartype job_secrets: ~data_box_management_client.models.JobSecrets """ _validation = { @@ -3393,7 +4218,7 @@ class UnencryptedCredentialsList(msrest.serialization.Model): """List of unencrypted credentials for accessing device. :param value: List of unencrypted credentials. - :type value: list[~azure.mgmt.databox.models.UnencryptedCredentials] + :type value: list[~data_box_management_client.models.UnencryptedCredentials] :param next_link: Link for the next set of unencrypted credentials. 
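The TransferConfiguration family introduced above is how data selection is expressed: either transfer everything ("TransferAll") or filter ("TransferUsingFilter"). A minimal "transfer all blobs" sketch built only from the models defined in this hunk; how the configuration is attached to job details is outside this hunk and not shown, and the import path is assumed as before:

    from azext_databox.vendored_sdks.databox.models import (
        TransferAllDetails,
        TransferConfiguration,
        TransferConfigurationTransferAllDetails,
    )

    transfer_config = TransferConfiguration(
        transfer_configuration_type="TransferAll",
        transfer_all_details=TransferConfigurationTransferAllDetails(
            include=TransferAllDetails(
                data_account_type="StorageAccount",
                transfer_all_blobs=True,
                transfer_all_files=False,
            )
        ),
    )

A "TransferUsingFilter" configuration is built the same way, wrapping a TransferFilterDetails in transfer_filter_details instead of transfer_all_details.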
:type next_link: str """ @@ -3419,14 +4244,17 @@ class UpdateJobDetails(msrest.serialization.Model): """Job details for update. :param contact_details: Contact details for notification and shipping. - :type contact_details: ~azure.mgmt.databox.models.ContactDetails + :type contact_details: ~data_box_management_client.models.ContactDetails :param shipping_address: Shipping address of the customer. - :type shipping_address: ~azure.mgmt.databox.models.ShippingAddress + :type shipping_address: ~data_box_management_client.models.ShippingAddress + :param key_encryption_key: Key encryption key for the job. + :type key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey """ _attribute_map = { 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, } def __init__( @@ -3434,11 +4262,64 @@ def __init__( *, contact_details: Optional["ContactDetails"] = None, shipping_address: Optional["ShippingAddress"] = None, + key_encryption_key: Optional["KeyEncryptionKey"] = None, **kwargs ): super(UpdateJobDetails, self).__init__(**kwargs) self.contact_details = contact_details self.shipping_address = shipping_address + self.key_encryption_key = key_encryption_key + + +class UserAssignedIdentity(msrest.serialization.Model): + """Class defining User assigned identity details. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar principal_id: The principal id of user assigned identity. + :vartype principal_id: str + :ivar client_id: The client id of user assigned identity. + :vartype client_id: str + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'client_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(UserAssignedIdentity, self).__init__(**kwargs) + self.principal_id = None + self.client_id = None + + +class UserAssignedProperties(msrest.serialization.Model): + """User assigned identity properties. + + :param resource_id: Arm resource id for user assigned identity to be used to fetch MSI token. + :type resource_id: str + """ + + _attribute_map = { + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + } + + def __init__( + self, + *, + resource_id: Optional[str] = None, + **kwargs + ): + super(UserAssignedProperties, self).__init__(**kwargs) + self.resource_id = resource_id class ValidateAddress(ValidationInputRequest): @@ -3447,17 +4328,17 @@ class ValidateAddress(ValidationInputRequest): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :param shipping_address: Required. Shipping address of the customer. 
- :type shipping_address: ~azure.mgmt.databox.models.ShippingAddress + :type shipping_address: ~data_box_management_client.models.ShippingAddress :param device_type: Required. Device type to be used for the job. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type device_type: str or ~azure.mgmt.databox.models.SkuName + :type device_type: str or ~data_box_management_client.models.SkuName :param transport_preferences: Preferences related to the shipment logistics of the sku. - :type transport_preferences: ~azure.mgmt.databox.models.TransportPreferences + :type transport_preferences: ~data_box_management_client.models.TransportPreferences """ _validation = { @@ -3495,10 +4376,11 @@ class ValidationResponse(msrest.serialization.Model): :ivar status: Overall validation status. Possible values include: "AllValidToProceed", "InputsRevisitRequired", "CertainInputValidationsSkipped". - :vartype status: str or ~azure.mgmt.databox.models.OverallValidationStatus + :vartype status: str or ~data_box_management_client.models.OverallValidationStatus :ivar individual_response_details: List of response details contain validationType and its response as key and value respectively. - :vartype individual_response_details: list[~azure.mgmt.databox.models.ValidationInputResponse] + :vartype individual_response_details: + list[~data_box_management_client.models.ValidationInputResponse] """ _validation = { diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/operations/__init__.py b/src/databox/azext_databox/vendored_sdks/databox/operations/__init__.py similarity index 83% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/operations/__init__.py rename to src/databox/azext_databox/vendored_sdks/databox/operations/__init__.py index 9c8fa7a8253..bd13cc67afb 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/operations/__init__.py +++ b/src/databox/azext_databox/vendored_sdks/databox/operations/__init__.py @@ -8,10 +8,12 @@ from ._operations import Operations from ._jobs_operations import JobsOperations +from ._data_box_management_client_operations import DataBoxManagementClientOperationsMixin from ._service_operations import ServiceOperations __all__ = [ 'Operations', 'JobsOperations', + 'DataBoxManagementClientOperationsMixin', 'ServiceOperations', ] diff --git a/src/databox/azext_databox/vendored_sdks/databox/operations/_data_box_management_client_operations.py b/src/databox/azext_databox/vendored_sdks/databox/operations/_data_box_management_client_operations.py new file mode 100644 index 00000000000..a502fe73f6e --- /dev/null +++ b/src/databox/azext_databox/vendored_sdks/databox/operations/_data_box_management_client_operations.py @@ -0,0 +1,91 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class DataBoxManagementClientOperationsMixin(object): + + def mitigate( + self, + job_name, # type: str + resource_group_name, # type: str + mitigate_job_request, # type: "models.MitigateJobRequest" + **kwargs # type: Any + ): + # type: (...) -> None + """Request to mitigate for a given job. + + :param job_name: The name of the job Resource within the specified resource group. job names + must be between 3 and 24 characters in length and use any alphanumeric and underscore only. + :type job_name: str + :param resource_group_name: The Resource Group Name. + :type resource_group_name: str + :param mitigate_job_request: Mitigation Request. + :type mitigate_job_request: ~data_box_management_client.models.MitigateJobRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-03-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.mitigate.metadata['url'] # type: ignore + path_format_arguments = { + 'jobName': self._serialize.url("job_name", job_name, 'str', max_length=24, min_length=3, pattern=r'^[-\w\.]+$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(mitigate_job_request, 'MitigateJobRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, 
error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + mitigate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBox/jobs/{jobName}/mitigate'} # type: ignore diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/operations/_jobs_operations.py b/src/databox/azext_databox/vendored_sdks/databox/operations/_jobs_operations.py similarity index 89% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/operations/_jobs_operations.py rename to src/databox/azext_databox/vendored_sdks/databox/operations/_jobs_operations.py index e984b83e43a..f27f0dc5fc7 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/operations/_jobs_operations.py +++ b/src/databox/azext_databox/vendored_sdks/databox/operations/_jobs_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling -from .. import models as _models +from .. import models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -32,14 +32,14 @@ class JobsOperations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.databox.models + :type models: ~data_box_management_client.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ - models = _models + models = models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -52,7 +52,7 @@ def list( skip_token=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> Iterable["_models.JobResourceList"] + # type: (...) -> Iterable["models.JobResourceList"] """Lists all the jobs available under the subscription. :param skip_token: $skipToken is supported on Get list of jobs, which provides the next page in @@ -60,15 +60,15 @@ def list( :type skip_token: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either JobResourceList or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databox.models.JobResourceList] + :rtype: ~azure.core.paging.ItemPaged[~data_box_management_client.models.JobResourceList] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResourceList"] + cls = kwargs.pop('cls', None) # type: ClsType["models.JobResourceList"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -110,8 +110,9 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -126,7 +127,7 @@ def list_by_resource_group( skip_token=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) 
-> Iterable["_models.JobResourceList"] + # type: (...) -> Iterable["models.JobResourceList"] """Lists all the jobs available under the given resource group. :param resource_group_name: The Resource Group Name. @@ -136,15 +137,15 @@ def list_by_resource_group( :type skip_token: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either JobResourceList or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databox.models.JobResourceList] + :rtype: ~azure.core.paging.ItemPaged[~data_box_management_client.models.JobResourceList] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResourceList"] + cls = kwargs.pop('cls', None) # type: ClsType["models.JobResourceList"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -187,8 +188,9 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -204,7 +206,7 @@ def get( expand=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "_models.JobResource" + # type: (...) -> "models.JobResource" """Gets information about the specified job. :param resource_group_name: The Resource Group Name. @@ -217,15 +219,15 @@ def get( :type expand: str :keyword callable cls: A custom type or function that will be passed the direct response :return: JobResource, or the result of cls(response) - :rtype: ~azure.mgmt.databox.models.JobResource + :rtype: ~data_box_management_client.models.JobResource :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResource"] + cls = kwargs.pop('cls', None) # type: ClsType["models.JobResource"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" accept = "application/json" # Construct URL @@ -253,7 +255,8 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('JobResource', pipeline_response) @@ -267,16 +270,16 @@ def _create_initial( self, resource_group_name, # type: str job_name, # type: str - job_resource, # type: "_models.JobResource" + job_resource, # type: "models.JobResource" **kwargs # type: Any ): - # type: (...) -> Optional["_models.JobResource"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.JobResource"]] + # type: (...) 
-> Optional["models.JobResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.JobResource"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -307,7 +310,8 @@ def _create_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: @@ -323,10 +327,10 @@ def begin_create( self, resource_group_name, # type: str job_name, # type: str - job_resource, # type: "_models.JobResource" + job_resource, # type: "models.JobResource" **kwargs # type: Any ): - # type: (...) -> LROPoller["_models.JobResource"] + # type: (...) -> LROPoller["models.JobResource"] """Creates a new job with the specified parameters. Existing job cannot be updated with this API and should instead be updated with the Update job API. @@ -336,7 +340,7 @@ def begin_create( must be between 3 and 24 characters in length and use any alphanumeric and underscore only. :type job_name: str :param job_resource: Job details from request body. - :type job_resource: ~azure.mgmt.databox.models.JobResource + :type job_resource: ~data_box_management_client.models.JobResource :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a @@ -344,11 +348,11 @@ def begin_create( :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either JobResource or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databox.models.JobResource] + :rtype: ~azure.core.polling.LROPoller[~data_box_management_client.models.JobResource] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResource"] + cls = kwargs.pop('cls', None) # type: ClsType["models.JobResource"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -405,7 +409,7 @@ def _delete_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" accept = "application/json" # Construct URL @@ -429,9 +433,10 @@ def _delete_initial( pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [202, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) @@ -508,17 +513,17 @@ def _update_initial( self, resource_group_name, # type: str job_name, # type: str - job_resource_update_parameter, # type: "_models.JobResourceUpdateParameter" + job_resource_update_parameter, # type: "models.JobResourceUpdateParameter" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> Optional["_models.JobResource"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.JobResource"]] + # type: (...) -> Optional["models.JobResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.JobResource"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -551,7 +556,8 @@ def _update_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: @@ -567,11 +573,11 @@ def begin_update( self, resource_group_name, # type: str job_name, # type: str - job_resource_update_parameter, # type: "_models.JobResourceUpdateParameter" + job_resource_update_parameter, # type: "models.JobResourceUpdateParameter" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> LROPoller["_models.JobResource"] + # type: (...) -> LROPoller["models.JobResource"] """Updates the properties of an existing job. :param resource_group_name: The Resource Group Name. @@ -580,7 +586,7 @@ def begin_update( must be between 3 and 24 characters in length and use any alphanumeric and underscore only. :type job_name: str :param job_resource_update_parameter: Job update parameters from request body. 
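Across the operations in this file, the error path changes from a bare HttpResponseError to one that carries the deserialized ApiError (model=error). A hedged caller-side sketch, assuming an already-constructed management client named client with the jobs operation group attached as client.jobs, a job_resource built elsewhere, and that azure-core exposes the attached model on the exception's model attribute:

    from azure.core.exceptions import HttpResponseError

    try:
        poller = client.jobs.begin_create("MyResourceGroup", "myjob01", job_resource)
        job = poller.result()
    except HttpResponseError as ex:
        # With this change the service error payload is deserialized as ApiError
        # and attached to the exception rather than being discarded.
        print(ex.message)
        if ex.model is not None:
            print(ex.model)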
- :type job_resource_update_parameter: ~azure.mgmt.databox.models.JobResourceUpdateParameter + :type job_resource_update_parameter: ~data_box_management_client.models.JobResourceUpdateParameter :param if_match: Defines the If-Match condition. The patch will be performed only if the ETag of the job on the server matches this value. :type if_match: str @@ -591,11 +597,11 @@ def begin_update( :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either JobResource or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databox.models.JobResource] + :rtype: ~azure.core.polling.LROPoller[~data_box_management_client.models.JobResource] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResource"] + cls = kwargs.pop('cls', None) # type: ClsType["models.JobResource"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -645,10 +651,10 @@ def book_shipment_pick_up( self, resource_group_name, # type: str job_name, # type: str - shipment_pick_up_request, # type: "_models.ShipmentPickUpRequest" + shipment_pick_up_request, # type: "models.ShipmentPickUpRequest" **kwargs # type: Any ): - # type: (...) -> "_models.ShipmentPickUpResponse" + # type: (...) -> "models.ShipmentPickUpResponse" """Book shipment pick up. :param resource_group_name: The Resource Group Name. @@ -657,18 +663,18 @@ def book_shipment_pick_up( must be between 3 and 24 characters in length and use any alphanumeric and underscore only. :type job_name: str :param shipment_pick_up_request: Details of shipment pick up request. - :type shipment_pick_up_request: ~azure.mgmt.databox.models.ShipmentPickUpRequest + :type shipment_pick_up_request: ~data_box_management_client.models.ShipmentPickUpRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ShipmentPickUpResponse, or the result of cls(response) - :rtype: ~azure.mgmt.databox.models.ShipmentPickUpResponse + :rtype: ~data_box_management_client.models.ShipmentPickUpResponse :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ShipmentPickUpResponse"] + cls = kwargs.pop('cls', None) # type: ClsType["models.ShipmentPickUpResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -699,7 +705,8 @@ def book_shipment_pick_up( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ShipmentPickUpResponse', pipeline_response) @@ -713,7 +720,7 @@ def cancel( self, resource_group_name, # type: str job_name, # type: str - cancellation_reason, # type: "_models.CancellationReason" + cancellation_reason, # type: "models.CancellationReason" **kwargs # type: Any ): # type: (...) 
-> None @@ -725,7 +732,7 @@ def cancel( must be between 3 and 24 characters in length and use any alphanumeric and underscore only. :type job_name: str :param cancellation_reason: Reason for cancellation. - :type cancellation_reason: ~azure.mgmt.databox.models.CancellationReason + :type cancellation_reason: ~data_box_management_client.models.CancellationReason :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None @@ -736,7 +743,7 @@ def cancel( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -767,7 +774,8 @@ def cancel( if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) @@ -780,7 +788,7 @@ def list_credentials( job_name, # type: str **kwargs # type: Any ): - # type: (...) -> Iterable["_models.UnencryptedCredentialsList"] + # type: (...) -> Iterable["models.UnencryptedCredentialsList"] """This method gets the unencrypted secrets related to the job. :param resource_group_name: The Resource Group Name. @@ -790,15 +798,15 @@ def list_credentials( :type job_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either UnencryptedCredentialsList or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databox.models.UnencryptedCredentialsList] + :rtype: ~azure.core.paging.ItemPaged[~data_box_management_client.models.UnencryptedCredentialsList] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.UnencryptedCredentialsList"] + cls = kwargs.pop('cls', None) # type: ClsType["models.UnencryptedCredentialsList"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -840,8 +848,9 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/operations/_operations.py b/src/databox/azext_databox/vendored_sdks/databox/operations/_operations.py similarity index 88% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/operations/_operations.py rename to src/databox/azext_databox/vendored_sdks/databox/operations/_operations.py index cc8b3483362..938441cc9b8 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/operations/_operations.py +++ 
b/src/databox/azext_databox/vendored_sdks/databox/operations/_operations.py @@ -14,7 +14,7 @@ from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.mgmt.core.exceptions import ARMErrorFormat -from .. import models as _models +from .. import models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -30,14 +30,14 @@ class Operations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.databox.models + :type models: ~data_box_management_client.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ - models = _models + models = models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -49,20 +49,20 @@ def list( self, **kwargs # type: Any ): - # type: (...) -> Iterable["_models.OperationList"] + # type: (...) -> Iterable["models.OperationList"] """This method gets all the operations. :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either OperationList or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databox.models.OperationList] + :rtype: ~azure.core.paging.ItemPaged[~data_box_management_client.models.OperationList] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationList"] + cls = kwargs.pop('cls', None) # type: ClsType["models.OperationList"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" accept = "application/json" def prepare_request(next_link=None): @@ -98,8 +98,9 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/operations/_service_operations.py b/src/databox/azext_databox/vendored_sdks/databox/operations/_service_operations.py similarity index 73% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/operations/_service_operations.py rename to src/databox/azext_databox/vendored_sdks/databox/operations/_service_operations.py index f8cf7bdbaa3..ffbc7575c81 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/operations/_service_operations.py +++ b/src/databox/azext_databox/vendored_sdks/databox/operations/_service_operations.py @@ -14,7 +14,7 @@ from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.mgmt.core.exceptions import ARMErrorFormat -from .. import models as _models +from .. import models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -30,14 +30,14 @@ class ServiceOperations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. 
- :type models: ~azure.mgmt.databox.models + :type models: ~data_box_management_client.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ - models = _models + models = models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -45,96 +45,14 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config - def list_available_skus( - self, - location, # type: str - available_sku_request, # type: "_models.AvailableSkuRequest" - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.AvailableSkusResult"] - """This method provides the list of available skus for the given subscription and location. - - :param location: The location of the resource. - :type location: str - :param available_sku_request: Filters for showing the available skus. - :type available_sku_request: ~azure.mgmt.databox.models.AvailableSkuRequest - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either AvailableSkusResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databox.models.AvailableSkusResult] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.AvailableSkusResult"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" - content_type = "application/json" - accept = "application/json" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - if not next_link: - # Construct URL - url = self.list_available_skus.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'location': self._serialize.url("location", location, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(available_sku_request, 'AvailableSkuRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(available_sku_request, 'AvailableSkuRequest') - body_content_kwargs['content'] = body_content - request = self._client.get(url, query_parameters, header_parameters, **body_content_kwargs) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('AvailableSkusResult', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - 
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_available_skus.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataBox/locations/{location}/availableSkus'} # type: ignore - def list_available_skus_by_resource_group( self, resource_group_name, # type: str location, # type: str - available_sku_request, # type: "_models.AvailableSkuRequest" + available_sku_request, # type: "models.AvailableSkuRequest" **kwargs # type: Any ): - # type: (...) -> Iterable["_models.AvailableSkusResult"] + # type: (...) -> Iterable["models.AvailableSkusResult"] """This method provides the list of available skus for the given subscription, resource group and location. @@ -143,18 +61,18 @@ def list_available_skus_by_resource_group( :param location: The location of the resource. :type location: str :param available_sku_request: Filters for showing the available skus. - :type available_sku_request: ~azure.mgmt.databox.models.AvailableSkuRequest + :type available_sku_request: ~data_box_management_client.models.AvailableSkuRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either AvailableSkusResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databox.models.AvailableSkusResult] + :rtype: ~azure.core.paging.ItemPaged[~data_box_management_client.models.AvailableSkusResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.AvailableSkusResult"] + cls = kwargs.pop('cls', None) # type: ClsType["models.AvailableSkusResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" content_type = "application/json" accept = "application/json" @@ -204,8 +122,9 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -217,28 +136,28 @@ def get_next(next_link=None): def validate_address( self, location, # type: str - validate_address, # type: "_models.ValidateAddress" + validate_address, # type: "models.ValidateAddress" **kwargs # type: Any ): - # type: (...) -> "_models.AddressValidationOutput" - """[DEPRECATED NOTICE: This operation will soon be removed] This method validates the customer + # type: (...) -> "models.AddressValidationOutput" + """[DEPRECATED NOTICE: This operation will soon be removed]. This method validates the customer shipping address and provide alternate addresses if any. :param location: The location of the resource. :type location: str :param validate_address: Shipping address of the customer. 
- :type validate_address: ~azure.mgmt.databox.models.ValidateAddress + :type validate_address: ~data_box_management_client.models.ValidateAddress :keyword callable cls: A custom type or function that will be passed the direct response :return: AddressValidationOutput, or the result of cls(response) - :rtype: ~azure.mgmt.databox.models.AddressValidationOutput + :rtype: ~data_box_management_client.models.AddressValidationOutput :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.AddressValidationOutput"] + cls = kwargs.pop('cls', None) # type: ClsType["models.AddressValidationOutput"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -268,7 +187,8 @@ def validate_address( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('AddressValidationOutput', pipeline_response) @@ -282,10 +202,10 @@ def validate_inputs_by_resource_group( self, resource_group_name, # type: str location, # type: str - validation_request, # type: "_models.ValidationRequest" + validation_request, # type: "models.ValidationRequest" **kwargs # type: Any ): - # type: (...) -> "_models.ValidationResponse" + # type: (...) -> "models.ValidationResponse" """This method does all necessary pre-job creation validation under resource group. :param resource_group_name: The Resource Group Name. @@ -293,18 +213,18 @@ def validate_inputs_by_resource_group( :param location: The location of the resource. :type location: str :param validation_request: Inputs of the customer. 
- :type validation_request: ~azure.mgmt.databox.models.ValidationRequest + :type validation_request: ~data_box_management_client.models.ValidationRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ValidationResponse, or the result of cls(response) - :rtype: ~azure.mgmt.databox.models.ValidationResponse + :rtype: ~data_box_management_client.models.ValidationResponse :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ValidationResponse"] + cls = kwargs.pop('cls', None) # type: ClsType["models.ValidationResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -335,7 +255,8 @@ def validate_inputs_by_resource_group( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ValidationResponse', pipeline_response) @@ -348,27 +269,27 @@ def validate_inputs_by_resource_group( def validate_inputs( self, location, # type: str - validation_request, # type: "_models.ValidationRequest" + validation_request, # type: "models.ValidationRequest" **kwargs # type: Any ): - # type: (...) -> "_models.ValidationResponse" + # type: (...) -> "models.ValidationResponse" """This method does all necessary pre-job creation validation under subscription. :param location: The location of the resource. :type location: str :param validation_request: Inputs of the customer. 
- :type validation_request: ~azure.mgmt.databox.models.ValidationRequest + :type validation_request: ~data_box_management_client.models.ValidationRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ValidationResponse, or the result of cls(response) - :rtype: ~azure.mgmt.databox.models.ValidationResponse + :rtype: ~data_box_management_client.models.ValidationResponse :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ValidationResponse"] + cls = kwargs.pop('cls', None) # type: ClsType["models.ValidationResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -398,7 +319,8 @@ def validate_inputs( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ValidationResponse', pipeline_response) @@ -411,27 +333,28 @@ def validate_inputs( def region_configuration( self, location, # type: str - region_configuration_request, # type: "_models.RegionConfigurationRequest" + region_configuration_request, # type: "models.RegionConfigurationRequest" **kwargs # type: Any ): - # type: (...) -> "_models.RegionConfigurationResponse" - """This API provides configuration details specific to given region/location. + # type: (...) -> "models.RegionConfigurationResponse" + """This API provides configuration details specific to given region/location at Subscription + level. :param location: The location of the resource. :type location: str :param region_configuration_request: Request body to get the configuration for the region. 
- :type region_configuration_request: ~azure.mgmt.databox.models.RegionConfigurationRequest + :type region_configuration_request: ~data_box_management_client.models.RegionConfigurationRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: RegionConfigurationResponse, or the result of cls(response) - :rtype: ~azure.mgmt.databox.models.RegionConfigurationResponse + :rtype: ~data_box_management_client.models.RegionConfigurationResponse :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.RegionConfigurationResponse"] + cls = kwargs.pop('cls', None) # type: ClsType["models.RegionConfigurationResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-03-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -461,7 +384,8 @@ def region_configuration( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('RegionConfigurationResponse', pipeline_response) @@ -470,3 +394,73 @@ def region_configuration( return deserialized region_configuration.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataBox/locations/{location}/regionConfiguration'} # type: ignore + + def region_configuration_by_resource_group( + self, + resource_group_name, # type: str + location, # type: str + region_configuration_request, # type: "models.RegionConfigurationRequest" + **kwargs # type: Any + ): + # type: (...) -> "models.RegionConfigurationResponse" + """This API provides configuration details specific to given region/location at Resource group + level. + + :param resource_group_name: The Resource Group Name. + :type resource_group_name: str + :param location: The location of the resource. + :type location: str + :param region_configuration_request: Request body to get the configuration for the region at + resource group level. 
+ :type region_configuration_request: ~data_box_management_client.models.RegionConfigurationRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: RegionConfigurationResponse, or the result of cls(response) + :rtype: ~data_box_management_client.models.RegionConfigurationResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.RegionConfigurationResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-03-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.region_configuration_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'location': self._serialize.url("location", location, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(region_configuration_request, 'RegionConfigurationRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('RegionConfigurationResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + region_configuration_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBox/locations/{location}/regionConfiguration'} # type: ignore diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/_configuration.py b/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/_configuration.py deleted file mode 100644 index ff931804797..00000000000 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/_configuration.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- - -from typing import TYPE_CHECKING - -from azure.core.configuration import Configuration -from azure.core.pipeline import policies -from azure.mgmt.core.policies import ARMHttpLoggingPolicy - -from ._version import VERSION - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any - - from azure.core.credentials import TokenCredential - - -class DataBoxManagementClientConfiguration(Configuration): - """Configuration for DataBoxManagementClient. - - Note that all parameters used to create this instance are saved as instance - attributes. - - :param credential: Credential needed for the client to connect to Azure. - :type credential: ~azure.core.credentials.TokenCredential - :param subscription_id: The Subscription Id. - :type subscription_id: str - """ - - def __init__( - self, - credential, # type: "TokenCredential" - subscription_id, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - if credential is None: - raise ValueError("Parameter 'credential' must not be None.") - if subscription_id is None: - raise ValueError("Parameter 'subscription_id' must not be None.") - super(DataBoxManagementClientConfiguration, self).__init__(**kwargs) - - self.credential = credential - self.subscription_id = subscription_id - self.api_version = "2019-09-01" - self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) - kwargs.setdefault('sdk_moniker', 'mgmt-databox/{}'.format(VERSION)) - self._configure(**kwargs) - - def _configure( - self, - **kwargs # type: Any - ): - # type: (...) -> None - self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) - self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) - self.authentication_policy = kwargs.get('authentication_policy') - if self.credential and not self.authentication_policy: - self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/_data_box_management_client.py b/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/_data_box_management_client.py deleted file mode 100644 index e696cb06a78..00000000000 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/_data_box_management_client.py +++ /dev/null @@ -1,80 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- - -from typing import TYPE_CHECKING - -from azure.mgmt.core import ARMPipelineClient -from msrest import Deserializer, Serializer - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Optional - - from azure.core.credentials import TokenCredential - -from ._configuration import DataBoxManagementClientConfiguration -from .operations import Operations -from .operations import JobsOperations -from .operations import ServiceOperations -from . import models - - -class DataBoxManagementClient(object): - """The DataBox Client. - - :ivar operations: Operations operations - :vartype operations: azure.mgmt.databox.operations.Operations - :ivar jobs: JobsOperations operations - :vartype jobs: azure.mgmt.databox.operations.JobsOperations - :ivar service: ServiceOperations operations - :vartype service: azure.mgmt.databox.operations.ServiceOperations - :param credential: Credential needed for the client to connect to Azure. - :type credential: ~azure.core.credentials.TokenCredential - :param subscription_id: The Subscription Id. - :type subscription_id: str - :param str base_url: Service URL - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - """ - - def __init__( - self, - credential, # type: "TokenCredential" - subscription_id, # type: str - base_url=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> None - if not base_url: - base_url = 'https://management.azure.com' - self._config = DataBoxManagementClientConfiguration(credential, subscription_id, **kwargs) - self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) - - client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} - self._serialize = Serializer(client_models) - self._serialize.client_side_validation = False - self._deserialize = Deserializer(client_models) - - self.operations = Operations( - self._client, self._config, self._serialize, self._deserialize) - self.jobs = JobsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.service = ServiceOperations( - self._client, self._config, self._serialize, self._deserialize) - - def close(self): - # type: () -> None - self._client.close() - - def __enter__(self): - # type: () -> DataBoxManagementClient - self._client.__enter__() - return self - - def __exit__(self, *exc_details): - # type: (Any) -> None - self._client.__exit__(*exc_details) diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/_metadata.json b/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/_metadata.json deleted file mode 100644 index 10c0c55446a..00000000000 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/_metadata.json +++ /dev/null @@ -1,63 +0,0 @@ -{ - "chosen_version": "2019-09-01", - "total_api_version_list": ["2019-09-01"], - "client": { - "name": "DataBoxManagementClient", - "filename": "_data_box_management_client", - "description": "The DataBox Client.", - "base_url": "\u0027https://management.azure.com\u0027", - "custom_base_url": null, - "azure_arm": true, - "has_lro_operations": true, - "client_side_validation": false - }, - "global_parameters": { - "sync": { - "credential": { - "signature": "credential, # type: \"TokenCredential\"", - "description": "Credential needed for the client to connect to Azure.", - "docstring_type": 
"~azure.core.credentials.TokenCredential", - "required": true - }, - "subscription_id": { - "signature": "subscription_id, # type: str", - "description": "The Subscription Id.", - "docstring_type": "str", - "required": true - } - }, - "async": { - "credential": { - "signature": "credential, # type: \"AsyncTokenCredential\"", - "description": "Credential needed for the client to connect to Azure.", - "docstring_type": "~azure.core.credentials_async.AsyncTokenCredential", - "required": true - }, - "subscription_id": { - "signature": "subscription_id, # type: str", - "description": "The Subscription Id.", - "docstring_type": "str", - "required": true - } - }, - "constant": { - }, - "call": "credential, subscription_id" - }, - "config": { - "credential": true, - "credential_scopes": ["https://management.azure.com/.default"], - "credential_default_policy_type": "BearerTokenCredentialPolicy", - "credential_default_policy_type_has_async_version": true, - "credential_key_header_name": null - }, - "operation_groups": { - "operations": "Operations", - "jobs": "JobsOperations", - "service": "ServiceOperations" - }, - "operation_mixins": { - }, - "sync_imports": "None", - "async_imports": "None" -} \ No newline at end of file diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/_configuration.py b/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/_configuration.py deleted file mode 100644 index bd265586067..00000000000 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/_configuration.py +++ /dev/null @@ -1,67 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from typing import Any, TYPE_CHECKING - -from azure.core.configuration import Configuration -from azure.core.pipeline import policies -from azure.mgmt.core.policies import ARMHttpLoggingPolicy - -from .._version import VERSION - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from azure.core.credentials_async import AsyncTokenCredential - - -class DataBoxManagementClientConfiguration(Configuration): - """Configuration for DataBoxManagementClient. - - Note that all parameters used to create this instance are saved as instance - attributes. - - :param credential: Credential needed for the client to connect to Azure. - :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :param subscription_id: The Subscription Id. 
- :type subscription_id: str - """ - - def __init__( - self, - credential: "AsyncTokenCredential", - subscription_id: str, - **kwargs: Any - ) -> None: - if credential is None: - raise ValueError("Parameter 'credential' must not be None.") - if subscription_id is None: - raise ValueError("Parameter 'subscription_id' must not be None.") - super(DataBoxManagementClientConfiguration, self).__init__(**kwargs) - - self.credential = credential - self.subscription_id = subscription_id - self.api_version = "2019-09-01" - self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) - kwargs.setdefault('sdk_moniker', 'mgmt-databox/{}'.format(VERSION)) - self._configure(**kwargs) - - def _configure( - self, - **kwargs: Any - ) -> None: - self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) - self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) - self.authentication_policy = kwargs.get('authentication_policy') - if self.credential and not self.authentication_policy: - self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/_data_box_management_client.py b/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/_data_box_management_client.py deleted file mode 100644 index b192ff7cd77..00000000000 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/_data_box_management_client.py +++ /dev/null @@ -1,74 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from typing import Any, Optional, TYPE_CHECKING - -from azure.mgmt.core import AsyncARMPipelineClient -from msrest import Deserializer, Serializer - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from azure.core.credentials_async import AsyncTokenCredential - -from ._configuration import DataBoxManagementClientConfiguration -from .operations import Operations -from .operations import JobsOperations -from .operations import ServiceOperations -from .. import models - - -class DataBoxManagementClient(object): - """The DataBox Client. 
- - :ivar operations: Operations operations - :vartype operations: azure.mgmt.databox.aio.operations.Operations - :ivar jobs: JobsOperations operations - :vartype jobs: azure.mgmt.databox.aio.operations.JobsOperations - :ivar service: ServiceOperations operations - :vartype service: azure.mgmt.databox.aio.operations.ServiceOperations - :param credential: Credential needed for the client to connect to Azure. - :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :param subscription_id: The Subscription Id. - :type subscription_id: str - :param str base_url: Service URL - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - """ - - def __init__( - self, - credential: "AsyncTokenCredential", - subscription_id: str, - base_url: Optional[str] = None, - **kwargs: Any - ) -> None: - if not base_url: - base_url = 'https://management.azure.com' - self._config = DataBoxManagementClientConfiguration(credential, subscription_id, **kwargs) - self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) - - client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} - self._serialize = Serializer(client_models) - self._serialize.client_side_validation = False - self._deserialize = Deserializer(client_models) - - self.operations = Operations( - self._client, self._config, self._serialize, self._deserialize) - self.jobs = JobsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.service = ServiceOperations( - self._client, self._config, self._serialize, self._deserialize) - - async def close(self) -> None: - await self._client.close() - - async def __aenter__(self) -> "DataBoxManagementClient": - await self._client.__aenter__() - return self - - async def __aexit__(self, *exc_details) -> None: - await self._client.__aexit__(*exc_details) diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/py.typed b/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/py.typed deleted file mode 100644 index e5aff4f83af..00000000000 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/py.typed +++ /dev/null @@ -1 +0,0 @@ -# Marker file for PEP 561. 
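For orientation, the following is a minimal sketch of how the regenerated track2 `service` operations group above might be exercised through the vendored client. It is not code shipped in this diff: the vendored import path, the `models` namespace, and the `AvailableSkuRequest` field names are assumptions inferred from the operation signatures and from report.md below; only the operation name, its parameters, the `ItemPaged` return, and the new `ApiError`-on-`HttpResponseError.model` behavior come from the diff itself.

```python
# Sketch only: calling the regenerated track2 `service` operations group.
# The vendored import path and the AvailableSkuRequest constructor are
# assumptions, not part of this change.
from azure.core.exceptions import HttpResponseError
from azure.identity import DefaultAzureCredential

from azext_databox.vendored_sdks.databox import DataBoxManagementClient, models  # assumed path

client = DataBoxManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
)

# Field names mirror transferType/country/location in report.md (assumed keywords).
request = models.AvailableSkuRequest(
    transfer_type="ImportToAzure", country="US", location="westus"
)

try:
    # list_available_skus_by_resource_group returns an ItemPaged iterator;
    # each element is taken from AvailableSkusResult.value.
    for sku in client.service.list_available_skus_by_resource_group(
        resource_group_name="bvttoolrg6",
        location="westus",
        available_sku_request=request,
    ):
        print(sku)
except HttpResponseError as err:
    # With this change the operations deserialize the service ApiError and
    # attach it to the exception as err.model.
    print(err.status_code, err.model)
```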
\ No newline at end of file diff --git a/src/databox/report.md b/src/databox/report.md new file mode 100644 index 00000000000..3866f31895a --- /dev/null +++ b/src/databox/report.md @@ -0,0 +1,404 @@ +# Azure CLI Module Creation Report + +## EXTENSION +|CLI Extension|Command Groups| +|---------|------------| +|az databox|[groups](#CommandGroups) + +## GROUPS +### Command groups in `az databox` extension +|CLI Command Group|Group Swagger name|Commands| +|---------|------------|--------| +|az databox job|Jobs|[commands](#CommandsInJobs)| +|az databox||[commands](#CommandsIn)| +|az databox service|Service|[commands](#CommandsInService)| + +## COMMANDS +### Commands in `az databox` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az databox mitigate](#Mitigate)|Mitigate|[Parameters](#ParametersMitigate)|[Example](#ExamplesMitigate)| + +### Commands in `az databox job` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az databox job list](#JobsListByResourceGroup)|ListByResourceGroup|[Parameters](#ParametersJobsListByResourceGroup)|[Example](#ExamplesJobsListByResourceGroup)| +|[az databox job list](#JobsList)|List|[Parameters](#ParametersJobsList)|[Example](#ExamplesJobsList)| +|[az databox job show](#JobsGet)|Get|[Parameters](#ParametersJobsGet)|[Example](#ExamplesJobsGet)| +|[az databox job create](#JobsCreate)|Create|[Parameters](#ParametersJobsCreate)|[Example](#ExamplesJobsCreate)| +|[az databox job update](#JobsUpdate)|Update|[Parameters](#ParametersJobsUpdate)|[Example](#ExamplesJobsUpdate)| +|[az databox job delete](#JobsDelete)|Delete|[Parameters](#ParametersJobsDelete)|[Example](#ExamplesJobsDelete)| +|[az databox job book-shipment-pick-up](#JobsBookShipmentPickUp)|BookShipmentPickUp|[Parameters](#ParametersJobsBookShipmentPickUp)|[Example](#ExamplesJobsBookShipmentPickUp)| +|[az databox job cancel](#JobsCancel)|Cancel|[Parameters](#ParametersJobsCancel)|[Example](#ExamplesJobsCancel)| +|[az databox job list-credentials](#JobsListCredentials)|ListCredentials|[Parameters](#ParametersJobsListCredentials)|[Example](#ExamplesJobsListCredentials)| + +### Commands in `az databox service` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az databox service list-available-sku-by-resource-group](#ServiceListAvailableSkusByResourceGroup)|ListAvailableSkusByResourceGroup|[Parameters](#ParametersServiceListAvailableSkusByResourceGroup)|[Example](#ExamplesServiceListAvailableSkusByResourceGroup)| +|[az databox service region-configuration](#ServiceRegionConfiguration)|RegionConfiguration|[Parameters](#ParametersServiceRegionConfiguration)|[Example](#ExamplesServiceRegionConfiguration)| +|[az databox service region-configuration-by-resource-group](#ServiceRegionConfigurationByResourceGroup)|RegionConfigurationByResourceGroup|[Parameters](#ParametersServiceRegionConfigurationByResourceGroup)|[Example](#ExamplesServiceRegionConfigurationByResourceGroup)| +|[az databox service validate-address](#ServiceValidateAddress)|ValidateAddress|[Parameters](#ParametersServiceValidateAddress)|[Example](#ExamplesServiceValidateAddress)| +|[az databox service validate-input](#ServiceValidateInputs)|ValidateInputs|[Parameters](#ParametersServiceValidateInputs)|[Example](#ExamplesServiceValidateInputs)| +|[az databox service 
validate-input-by-resource-group](#ServiceValidateInputsByResourceGroup)|ValidateInputsByResourceGroup|[Parameters](#ParametersServiceValidateInputsByResourceGroup)|[Example](#ExamplesServiceValidateInputsByResourceGroup)| + + +## COMMAND DETAILS + +### group `az databox` +#### Command `az databox mitigate` + +##### Example +``` +az databox mitigate --job-name "SdkJob8367" --customer-resolution-code "MoveToCleanUpDevice" --resource-group \ +"SdkRg9836" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--job-name**|string|The name of the job Resource within the specified resource group. job names must be between 3 and 24 characters in length and use any alphanumeric and underscore only|job_name|jobName| +|**--resource-group-name**|string|The Resource Group Name|resource_group_name|resourceGroupName| +|**--customer-resolution-code**|sealed-choice|Resolution code for the job|customer_resolution_code|customerResolutionCode| + +### group `az databox job` +#### Command `az databox job list` + +##### Example +``` +az databox job list --resource-group "SdkRg5154" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The Resource Group Name|resource_group_name|resourceGroupName| +|**--skip-token**|string|$skipToken is supported on Get list of jobs, which provides the next page in the list of jobs.|skip_token|$skipToken| + +#### Command `az databox job list` + +##### Example +``` +az databox job list +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +#### Command `az databox job show` + +##### Example +``` +az databox job show --expand "details" --name "SdkJob952" --resource-group "SdkRg5154" +``` +##### Example +``` +az databox job show --expand "details" --name "SdkJob1735" --resource-group "SdkRg7937" +``` +##### Example +``` +az databox job show --expand "details" --name "TJx-637505258985313014" --resource-group "dmstestresource" +``` +##### Example +``` +az databox job show --expand "details" --name "SdkJob6429" --resource-group "SdkRg8091" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The Resource Group Name|resource_group_name|resourceGroupName| +|**--job-name**|string|The name of the job Resource within the specified resource group. 
job names must be between 3 and 24 characters in length and use any alphanumeric and underscore only|job_name|jobName| +|**--expand**|string|$expand is supported on details parameter for job, which provides details on the job stages.|expand|$expand| + +#### Command `az databox job create` + +##### Example +``` +az databox job create --name "SdkJob952" --location "westus" --transfer-type "ImportToAzure" --details \ +"{\\"contactDetails\\":{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@microsoft.com\\"],\\"phone\ +\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"},\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountTyp\ +e\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourcegroups/\ +databoxbvt/providers/Microsoft.Storage/storageAccounts/databoxbvttestaccount\\"}}],\\"jobDetailsType\\":\\"DataBox\\",\ +\\"shippingAddress\\":{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\ +\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 \ +TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}" --sku name="DataBox" --resource-group "SdkRg5154" +``` +##### Example +``` +az databox job create --name "SdkJob9640" --location "westus" --transfer-type "ImportToAzure" --details \ +"{\\"contactDetails\\":{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@microsoft.com\\"],\\"phone\ +\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"},\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountTyp\ +e\\":\\"StorageAccount\\",\\"sharePassword\\":\\"\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8\ +ff7-4a25-95c7-ce9da541242f/resourceGroups/databoxbvt1/providers/Microsoft.Storage/storageAccounts/databoxbvttestaccount\ +2\\"}}],\\"devicePassword\\":\\"\\",\\"jobDetailsType\\":\\"DataBox\\",\\"shippingAddress\\":{\\"addres\ +sType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"po\ +stalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND \ +ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}" --sku name="DataBox" --resource-group "SdkRg7478" +``` +##### Example +``` +az databox job create --name "SdkJob6599" --location "westus" --transfer-type "ImportToAzure" --details \ +"{\\"contactDetails\\":{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@microsoft.com\\"],\\"phone\ +\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"},\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountTyp\ +e\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourcegroups/\ +databoxbvt/providers/Microsoft.Storage/storageAccounts/databoxbvttestaccount\\"}}],\\"jobDetailsType\\":\\"DataBox\\",\ +\\"preferences\\":{\\"encryptionPreferences\\":{\\"doubleEncryption\\":\\"Enabled\\"}},\\"shippingAddress\\":{\\"addres\ +sType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"po\ +stalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND \ +ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}" --sku name="DataBox" --resource-group "SdkRg608" +``` +##### Example +``` +az databox job create --name "SdkJob6429" --location "westus" --transfer-type "ExportFromAzure" --details \ +"{\\"contactDetails\\":{\\"contactName\\":\\"Public SDK 
Test\\",\\"emailList\\":[\\"testing@microsoft.com\\"],\\"phone\ +\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"},\\"dataExportDetails\\":[{\\"accountDetails\\":{\\"dataAccountTyp\ +e\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/\ +akvenkat/providers/Microsoft.Storage/storageAccounts/aaaaaa2\\"},\\"transferConfiguration\\":{\\"transferAllDetails\\":\ +{\\"include\\":{\\"dataAccountType\\":\\"StorageAccount\\",\\"transferAllBlobs\\":true,\\"transferAllFiles\\":true}},\\\ +"transferConfigurationType\\":\\"TransferAll\\"}}],\\"jobDetailsType\\":\\"DataBox\\",\\"shippingAddress\\":{\\"address\ +Type\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"pos\ +talCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND \ +ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}" --sku name="DataBox" --resource-group "SdkRg8091" +``` +##### Example +``` +az databox job create --name "SdkJob5337" --type "UserAssigned" --user-assigned-identities \ +"{\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/akvenkat/providers/Microsoft.ManagedIdentity/us\ +erAssignedIdentities/sdkIdentity\\":{}}" --location "westus" --transfer-type "ImportToAzure" --details \ +"{\\"contactDetails\\":{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@microsoft.com\\"],\\"phone\ +\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"},\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountTyp\ +e\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/\ +databoxbvt1/providers/Microsoft.Storage/storageAccounts/databoxbvttestaccount2\\"}}],\\"jobDetailsType\\":\\"DataBox\\"\ +,\\"shippingAddress\\":{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsof\ +t\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 \ +TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}" --sku name="DataBox" --resource-group "SdkRg7552" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The Resource Group Name|resource_group_name|resourceGroupName| +|**--job-name**|string|The name of the job Resource within the specified resource group. job names must be between 3 and 24 characters in length and use any alphanumeric and underscore only|job_name|jobName| +|**--location**|string|The location of the resource. This will be one of the supported and registered Azure Regions (e.g. West US, East US, Southeast Asia, etc.). The region of a resource cannot be changed once it is created, but if an identical region is specified on update the request will succeed.|location|location| +|**--sku**|object|The sku type.|sku|sku| +|**--transfer-type**|sealed-choice|Type of the data transfer.|transfer_type|transferType| +|**--tags**|dictionary|The list of key value pairs that describe the resource. These tags can be used in viewing and grouping this resource (across resource groups).|tags|tags| +|**--type**|string|Identity type|type|type| +|**--user-assigned-identities**|dictionary|User Assigned Identities|user_assigned_identities|userAssignedIdentities| +|**--details**|object|Details of a job run. 
This field will only be sent for expand details filter.|details|details| +|**--delivery-type**|sealed-choice|Delivery type of Job.|delivery_type|deliveryType| +|**--scheduled-date-time**|date-time|Scheduled date time.|scheduled_date_time|scheduledDateTime| + +#### Command `az databox job update` + +##### Example +``` +az databox job update --name "SdkJob952" --contact-name "Update Job" --email-list "testing@microsoft.com" --phone \ +"1234567890" --phone-extension "1234" --shipping-address address-type="Commercial" city="San Francisco" \ +company-name="Microsoft" country="US" postal-code="94107" state-or-province="CA" street-address1="16 TOWNSEND ST" \ +street-address2="Unit 1" --resource-group "SdkRg5154" +``` +##### Example +``` +az databox job update --name "SdkJob1735" --kek-type "CustomerManaged" --kek-url "https://sdkkeyvault.vault.azure.net/k\ +eys/SSDKEY/" --kek-vault-resource-id "/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/akvenkat/provi\ +ders/Microsoft.KeyVault/vaults/SDKKeyVault" --resource-group "SdkRg7937" +``` +##### Example +``` +az databox job update --name "SdkJob2965" --resource-identity-type "SystemAssigned,UserAssigned" \ +--user-assigned-identities "{\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/akvenkat/providers/M\ +icrosoft.ManagedIdentity/userAssignedIdentities/sdkIdentity\\":{}}" --type "UserAssigned" --user-assigned \ +resource-id="/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/akvenkat/providers/Microsoft.ManagedIde\ +ntity/userAssignedIdentities/sdkIdentity" --kek-type "CustomerManaged" --kek-url "https://sdkkeyvault.vault.azure.net/k\ +eys/SSDKEY/" --kek-vault-resource-id "/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/akvenkat/provi\ +ders/Microsoft.KeyVault/vaults/SDKKeyVault" --resource-group "SdkRg9765" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The Resource Group Name|resource_group_name|resourceGroupName| +|**--job-name**|string|The name of the job Resource within the specified resource group. job names must be between 3 and 24 characters in length and use any alphanumeric and underscore only|job_name|jobName| +|**--if-match**|string|Defines the If-Match condition. The patch will be performed only if the ETag of the job on the server matches this value.|if_match|If-Match| +|**--tags**|dictionary|The list of key value pairs that describe the resource. These tags can be used in viewing and grouping this resource (across resource groups).|tags|tags| +|**--shipping-address**|object|Shipping address of the customer.|shipping_address|shippingAddress| +|**--kek-type**|sealed-choice|Type of encryption key used for key encryption.|kek_type|kekType| +|**--kek-url**|string|Key encryption key. It is required in case of Customer managed KekType.|kek_url|kekUrl| +|**--kek-vault-resource-id**|string|Kek vault resource id. 
It is required in case of Customer managed KekType.|kek_vault_resource_id|kekVaultResourceID| +|**--type**|string|Managed service identity type.|type|type| +|**--user-assigned**|object|User assigned identity properties.|user_assigned|userAssigned| +|**--contact-name**|string|Contact name of the person.|contact_name|contactName| +|**--phone**|string|Phone number of the contact person.|phone|phone| +|**--phone-extension**|string|Phone extension number of the contact person.|phone_extension|phoneExtension| +|**--mobile**|string|Mobile number of the contact person.|mobile|mobile| +|**--email-list**|array|List of Email-ids to be notified about job progress.|email_list|emailList| +|**--notification-preference**|array|Notification preference for a job stage.|notification_preference|notificationPreference| +|**--resource-identity-type**|string|Identity type|resource_identity_type|type| +|**--user-assigned-identities**|dictionary|User Assigned Identities|user_assigned_identities|userAssignedIdentities| + +#### Command `az databox job delete` + +##### Example +``` +az databox job delete --name "SdkJob952" --resource-group "SdkRg5154" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The Resource Group Name|resource_group_name|resourceGroupName| +|**--job-name**|string|The name of the job Resource within the specified resource group. job names must be between 3 and 24 characters in length and use any alphanumeric and underscore only|job_name|jobName| + +#### Command `az databox job book-shipment-pick-up` + +##### Example +``` +az databox job book-shipment-pick-up --name "TJ-636646322037905056" --resource-group "bvttoolrg6" --end-time \ +"2019-09-22T18:30:00Z" --shipment-location "Front desk" --start-time "2019-09-20T18:30:00Z" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The Resource Group Name|resource_group_name|resourceGroupName| +|**--job-name**|string|The name of the job Resource within the specified resource group. job names must be between 3 and 24 characters in length and use any alphanumeric and underscore only|job_name|jobName| +|**--start-time**|date-time|Minimum date after which the pick up should commence, this must be in local time of pick up area.|start_time|startTime| +|**--end-time**|date-time|Maximum date before which the pick up should commence, this must be in local time of pick up area.|end_time|endTime| +|**--shipment-location**|string|Shipment Location in the pickup place. Eg.front desk|shipment_location|shipmentLocation| + +#### Command `az databox job cancel` + +##### Example +``` +az databox job cancel --reason "CancelTest" --name "SdkJob952" --resource-group "SdkRg5154" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The Resource Group Name|resource_group_name|resourceGroupName| +|**--job-name**|string|The name of the job Resource within the specified resource group. 
job names must be between 3 and 24 characters in length and use any alphanumeric and underscore only|job_name|jobName| +|**--reason**|string|Reason for cancellation.|reason|reason| + +#### Command `az databox job list-credentials` + +##### Example +``` +az databox job list-credentials --name "TJ-636646322037905056" --resource-group "bvttoolrg6" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The Resource Group Name|resource_group_name|resourceGroupName| +|**--job-name**|string|The name of the job Resource within the specified resource group. job names must be between 3 and 24 characters in length and use any alphanumeric and underscore only|job_name|jobName| + +### group `az databox service` +#### Command `az databox service list-available-sku-by-resource-group` + +##### Example +``` +az databox service list-available-sku-by-resource-group --country "US" --available-sku-request-location "westus" \ +--transfer-type "ImportToAzure" --location "westus" --resource-group "bvttoolrg6" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The Resource Group Name|resource_group_name|resourceGroupName| +|**--location**|string|The location of the resource|location|location| +|**--transfer-type**|sealed-choice|Type of the transfer.|transfer_type|transferType| +|**--country**|string|ISO country code. Country for hardware shipment. For codes check: https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements|country|country| +|**--available-sku-request-location**|string|Location for data transfer. For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01|available_sku_request_location|location| +|**--sku-names**|array|Sku Names to filter for available skus|sku_names|skuNames| + +#### Command `az databox service region-configuration` + +##### Example +``` +az databox service region-configuration --location "westus" --schedule-availability-request \ +"{\\"skuName\\":\\"DataBox\\",\\"storageLocation\\":\\"westus\\"}" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--location**|string|The location of the resource|location|location| +|**--data-box-schedule-availability-request**|object|Request body to get the availability for scheduling data box orders orders.|data_box_schedule_availability_request|DataBoxScheduleAvailabilityRequest| +|**--disk-schedule-availability-request**|object|Request body to get the availability for scheduling disk orders.|disk_schedule_availability_request|DiskScheduleAvailabilityRequest| +|**--heavy-schedule-availability-request**|object|Request body to get the availability for scheduling heavy orders.|heavy_schedule_availability_request|HeavyScheduleAvailabilityRequest| +|**--sku-name**|sealed-choice|Type of the device.|sku_name|skuName| + +#### Command `az databox service region-configuration-by-resource-group` + +##### Example +``` +az databox service region-configuration-by-resource-group --location "westus" --schedule-availability-request \ +"{\\"skuName\\":\\"DataBox\\",\\"storageLocation\\":\\"westus\\"}" --resource-group "SdkRg4981" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The Resource 
Group Name|resource_group_name|resourceGroupName| +|**--location**|string|The location of the resource|location|location| +|**--data-box-schedule-availability-request**|object|Request body to get the availability for scheduling data box orders orders.|data_box_schedule_availability_request|DataBoxScheduleAvailabilityRequest| +|**--disk-schedule-availability-request**|object|Request body to get the availability for scheduling disk orders.|disk_schedule_availability_request|DiskScheduleAvailabilityRequest| +|**--heavy-schedule-availability-request**|object|Request body to get the availability for scheduling heavy orders.|heavy_schedule_availability_request|HeavyScheduleAvailabilityRequest| +|**--sku-name**|sealed-choice|Type of the device.|sku_name|skuName| + +#### Command `az databox service validate-address` + +##### Example +``` +az databox service validate-address --location "westus" --device-type "DataBox" --shipping-address \ +address-type="Commercial" city="San Francisco" company-name="Microsoft" country="US" postal-code="94107" \ +state-or-province="CA" street-address1="16 TOWNSEND ST" street-address2="Unit 1" --validation-type "ValidateAddress" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--location**|string|The location of the resource|location|location| +|**--validation-type**|sealed-choice|Identifies the type of validation request.|validation_type|validationType| +|**--shipping-address**|object|Shipping address of the customer.|shipping_address|shippingAddress| +|**--device-type**|sealed-choice|Device type to be used for the job.|device_type|deviceType| +|**--preferred-shipment-type**|sealed-choice|Indicates Shipment Logistics type that the customer preferred.|preferred_shipment_type|preferredShipmentType| + +#### Command `az databox service validate-input` + +##### Example +``` +az databox service validate-input --location "westus" --validation-request "{\\"individualRequestDetails\\":[{\\"dataIm\ +portDetails\\":[{\\"accountDetails\\":{\\"dataAccountType\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscripti\ +ons/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourcegroups/databoxbvt/providers/Microsoft.Storage/storageAccounts/databoxb\ +vttestaccount\\"}}],\\"deviceType\\":\\"DataBox\\",\\"transferType\\":\\"ImportToAzure\\",\\"validationType\\":\\"Valid\ +ateDataTransferDetails\\"},{\\"deviceType\\":\\"DataBox\\",\\"shippingAddress\\":{\\"addressType\\":\\"Commercial\\",\\\ +"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"s\ +tateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit \ +1\\"},\\"transportPreferences\\":{\\"preferredShipmentType\\":\\"MicrosoftManaged\\"},\\"validationType\\":\\"ValidateA\ +ddress\\"},{\\"validationType\\":\\"ValidateSubscriptionIsAllowedToCreateJob\\"},{\\"country\\":\\"US\\",\\"deviceType\ +\\":\\"DataBox\\",\\"location\\":\\"westus\\",\\"transferType\\":\\"ImportToAzure\\",\\"validationType\\":\\"ValidateSk\ +uAvailability\\"},{\\"deviceType\\":\\"DataBox\\",\\"validationType\\":\\"ValidateCreateOrderLimit\\"},{\\"deviceType\\\ +":\\"DataBox\\",\\"preference\\":{\\"transportPreferences\\":{\\"preferredShipmentType\\":\\"MicrosoftManaged\\"}},\\"v\ +alidationType\\":\\"ValidatePreferences\\"}],\\"validationCategory\\":\\"JobCreationValidation\\"}" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| 
+|------|----|-----------|----------|------------| +|**--location**|string|The location of the resource|location|location| +|**--create-job-validations**|object|It does all pre-job creation validations.|create_job_validations|CreateJobValidations| + +#### Command `az databox service validate-input-by-resource-group` + +##### Example +``` +az databox service validate-input-by-resource-group --location "westus" --resource-group "SdkRg6861" \ +--validation-request "{\\"individualRequestDetails\\":[{\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountT\ +ype\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourcegroup\ +s/databoxbvt/providers/Microsoft.Storage/storageAccounts/databoxbvttestaccount\\"}}],\\"deviceType\\":\\"DataBox\\",\\"\ +transferType\\":\\"ImportToAzure\\",\\"validationType\\":\\"ValidateDataTransferDetails\\"},{\\"deviceType\\":\\"DataBo\ +x\\",\\"shippingAddress\\":{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Micr\ +osoft\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 \ +TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit 1\\"},\\"transportPreferences\\":{\\"preferredShipmentType\\":\\"MicrosoftM\ +anaged\\"},\\"validationType\\":\\"ValidateAddress\\"},{\\"validationType\\":\\"ValidateSubscriptionIsAllowedToCreateJo\ +b\\"},{\\"country\\":\\"US\\",\\"deviceType\\":\\"DataBox\\",\\"location\\":\\"westus\\",\\"transferType\\":\\"ImportTo\ +Azure\\",\\"validationType\\":\\"ValidateSkuAvailability\\"},{\\"deviceType\\":\\"DataBox\\",\\"validationType\\":\\"Va\ +lidateCreateOrderLimit\\"},{\\"deviceType\\":\\"DataBox\\",\\"preference\\":{\\"transportPreferences\\":{\\"preferredSh\ +ipmentType\\":\\"MicrosoftManaged\\"}},\\"validationType\\":\\"ValidatePreferences\\"}],\\"validationCategory\\":\\"Job\ +CreationValidation\\"}" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The Resource Group Name|resource_group_name|resourceGroupName| +|**--location**|string|The location of the resource|location|location| +|**--create-job-validations**|object|It does all pre-job creation validations.|create_job_validations|CreateJobValidations| diff --git a/src/databox/setup.cfg b/src/databox/setup.cfg index 3c6e79cf31d..2fdd96e5d39 100644 --- a/src/databox/setup.cfg +++ b/src/databox/setup.cfg @@ -1,2 +1 @@ -[bdist_wheel] -universal=1 +#setup.cfg \ No newline at end of file diff --git a/src/databox/setup.py b/src/databox/setup.py index 84deeb4b6e6..ed1ec325aa7 100644 --- a/src/databox/setup.py +++ b/src/databox/setup.py @@ -8,15 +8,13 @@ from codecs import open from setuptools import setup, find_packages -try: - from azure_bdist_wheel import cmdclass -except ImportError: - from distutils import log as logger - logger.warn("Wheel is not available, disabling bdist_wheel hook") -# TODO: Confirm this is the right version number you want and it matches your # HISTORY.rst entry. 
-VERSION = '0.1.2' +VERSION = '0.1.0' +try: + from azext_databox.manual.version import VERSION +except ImportError: + pass # The full list of classifiers is available at # https://pypi.python.org/pypi?%3Aaction=list_classifiers @@ -26,17 +24,19 @@ 'Intended Audience :: System Administrators', 'Programming Language :: Python', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', - 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'License :: OSI Approved :: MIT License', ] -# TODO: Add any additional SDK dependencies here DEPENDENCIES = [] +try: + from azext_databox.manual.dependency import DEPENDENCIES +except ImportError: + pass + with open('README.md', 'r', encoding='utf-8') as f: README = f.read() with open('HISTORY.rst', 'r', encoding='utf-8') as f: @@ -45,8 +45,7 @@ setup( name='databox', version=VERSION, - description='Microsoft Azure Command-Line Tools DataBox Extension', - # TODO: Update author and email, if applicable + description='Microsoft Azure Command-Line Tools DataBoxManagementClient Extension', author='Microsoft Corporation', author_email='azpycli@microsoft.com', url='https://github.com/Azure/azure-cli-extensions/tree/master/src/databox',
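The regenerated setup.py above falls back to a hard-coded version and an empty dependency list unless the optional `azext_databox.manual` modules provide overrides. A hypothetical sketch of those override files (file names taken from the imports in setup.py; the contents are illustrative only):

```python
# azext_databox/manual/version.py -- optional override picked up by setup.py
VERSION = "0.1.0"

# azext_databox/manual/dependency.py -- optional override picked up by setup.py
DEPENDENCIES = []  # placeholder; list any extra runtime requirements here
```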