diff --git a/src/databox/HISTORY.rst b/src/databox/HISTORY.rst
index c3e5d4838ef..1c139576ba0 100644
--- a/src/databox/HISTORY.rst
+++ b/src/databox/HISTORY.rst
@@ -3,14 +3,6 @@
 Release History
 ===============
 
-0.1.2
-++++++
-* Migrate to track2 SDK
-
-0.1.1
-++++++
-* GA databox module.
-
 0.1.0
 ++++++
 * Initial release.
diff --git a/src/databox/azext_databox/__init__.py b/src/databox/azext_databox/__init__.py
index f7ea258697c..d4314b0a3a4 100644
--- a/src/databox/azext_databox/__init__.py
+++ b/src/databox/azext_databox/__init__.py
@@ -1,32 +1,50 @@
-# --------------------------------------------------------------------------------------------
+# --------------------------------------------------------------------------
 # Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# --------------------------------------------------------------------------------------------
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
 
 from azure.cli.core import AzCommandsLoader
+from azext_databox.generated._help import helps  # pylint: disable=unused-import
+try:
+    from azext_databox.manual._help import helps  # pylint: disable=reimported
+except ImportError:
+    pass
 
-import azext_databox._help  # pylint: disable=unused-import
-
 
-class DataBoxCommandsLoader(AzCommandsLoader):
+class DataBoxManagementClientCommandsLoader(AzCommandsLoader):
 
     def __init__(self, cli_ctx=None):
         from azure.cli.core.commands import CliCommandType
-        from azext_databox._client_factory import cf_databox
+        from azext_databox.generated._client_factory import cf_databox_cl
         databox_custom = CliCommandType(
             operations_tmpl='azext_databox.custom#{}',
-            client_factory=cf_databox)
-        super(DataBoxCommandsLoader, self).__init__(cli_ctx=cli_ctx,
-                                                    custom_command_type=databox_custom)
+            client_factory=cf_databox_cl)
+        parent = super(DataBoxManagementClientCommandsLoader, self)
+        parent.__init__(cli_ctx=cli_ctx, custom_command_type=databox_custom)
 
     def load_command_table(self, args):
-        from azext_databox.commands import load_command_table
+        from azext_databox.generated.commands import load_command_table
         load_command_table(self, args)
+        try:
+            from azext_databox.manual.commands import load_command_table as load_command_table_manual
+            load_command_table_manual(self, args)
+        except ImportError:
+            pass
         return self.command_table
 
     def load_arguments(self, command):
-        from azext_databox._params import load_arguments
+        from azext_databox.generated._params import load_arguments
         load_arguments(self, command)
+        try:
+            from azext_databox.manual._params import load_arguments as load_arguments_manual
+            load_arguments_manual(self, command)
+        except ImportError:
+            pass
 
 
-COMMAND_LOADER_CLS = DataBoxCommandsLoader
+COMMAND_LOADER_CLS = DataBoxManagementClientCommandsLoader
diff --git a/src/databox/azext_databox/_client_factory.py b/src/databox/azext_databox/_client_factory.py
deleted file mode 100644
index 441837babe7..00000000000
--- a/src/databox/azext_databox/_client_factory.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# --------------------------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- - - -def cf_databox(cli_ctx, *_): - from azure.cli.core.commands.client_factory import get_mgmt_service_client - from azext_databox.vendored_sdks.databox import DataBoxManagementClient - return get_mgmt_service_client(cli_ctx, DataBoxManagementClient) - - -def cf_jobs(cli_ctx, *_): - return cf_databox(cli_ctx).jobs diff --git a/src/databox/azext_databox/_help.py b/src/databox/azext_databox/_help.py deleted file mode 100644 index ff8468658c7..00000000000 --- a/src/databox/azext_databox/_help.py +++ /dev/null @@ -1,105 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- - -# pylint: disable=too-many-lines -# pylint: disable=line-too-long -from knack.help_files import helps # pylint: disable=unused-import - -helps['databox'] = """ - type: group - short-summary: Manage databox. -""" - -helps['databox job'] = """ - type: group - short-summary: Commands to manage databox job. -""" - -helps['databox job create'] = """ - type: command - short-summary: Create a new job with the specified parameters. - examples: - - name: Create a databox job to use both storage account and managed disk as data destination. - text: |- - az databox job create --resource-group "SdkRg4981" --name "SdkJob3971" --location \\ - "westus" --sku "DataBox" --contact-name "Public SDK Test" \\ - --phone "1234567890" --email-list "testing@microsoft.com" \\ - --street-address1 "16 TOWNSEND ST" --street-address2 "Unit 1" --city "San Francisco" \\ - --state-or-province "CA" --country "US" --postal-code "94107" --company-name "Microsoft" \\ - --storage-account sa1 sa2 --staging-storage-account sa \\ - --resource-group-for-managed-disk /subscriptions/sub/resourceGroups/rg - - - name: Create a databoxdisk job to use storage account as data destination. - text: |- - az databox job create --resource-group "SdkRg4981" --name "SdkJob3971" --location \\ - "westus" --sku "DataBoxDisk" --expected-data-size 1 --contact-name "Public SDK Test" \\ - --phone "1234567890" --email-list "testing@microsoft.com" --street-address1 "16 TOWNSEND ST" \\ - --street-address2 "Unit 1" --city "San Francisco" --state-or-province "CA" --country "US" \\ - --postal-code "94107" --company-name "Microsoft" --storage-account sa1 -""" - -helps['databox job update'] = """ - type: command - short-summary: Update an existing job with the specified parameters. - examples: - - name: Update the job "SdkJob3971" with the specified parameters. - text: |- - az databox job update --resource-group "SdkRg4981" --name "SdkJob3971" \\ - --contact-name "Update Job" --phone "1234567890" \\ - --email-list "testing@microsoft.com" \\ - --street-address1 "16 TOWNSEND ST" \\ - --city "San Francisco" --state-or-province "CA" \\ - --country "US" --postal-code "94107" \\ - --company-name "Microsoft" \\ -""" - -helps['databox job delete'] = """ - type: command - short-summary: Delete a job. - examples: - - name: Delete the job "SdkJob3971" in resource group "SdkRg4981". 
- text: |- - az databox job delete --resource-group "SdkRg4981" --name "SdkJob3971" -""" - -helps['databox job show'] = """ - type: command - short-summary: Get information about the specified job. - examples: - - name: Get the information about the job "SdkJob3971". - text: |- - az databox job show --resource-group "SdkRg4981" --name "SdkJob3971" -""" - -helps['databox job list'] = """ - type: command - short-summary: List all the jobs available under the given resource group or the given subscription. - examples: - - name: List all the jobs available under the current subscription. - text: |- - az databox job list - - name: List all the jobs available under the resource group "SdkRg4981". - text: |- - az databox job list --resource-group "SdkRg4981" -""" - -helps['databox job cancel'] = """ - type: command - short-summary: Cancel a job. - examples: - - name: Cancel the job "SdkJob3971" under resource group "SdkRg4981". - text: |- - az databox job cancel --resource-group "SdkRg4981" --name "SdkJob3971" --reason "CancelTest" -""" - -helps['databox job list-credentials'] = """ - type: command - short-summary: List the unencrypted secrets related to the job. - examples: - - name: List the unencrypted secrets related to the job "TJ-636646322037905056". - text: |- - az databox job list-credentials --resource-group "bvttoolrg6" --name "TJ-636646322037905056" -""" diff --git a/src/databox/azext_databox/_params.py b/src/databox/azext_databox/_params.py deleted file mode 100644 index c9d09d4467c..00000000000 --- a/src/databox/azext_databox/_params.py +++ /dev/null @@ -1,75 +0,0 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- -# pylint: disable=line-too-long -# pylint: disable=too-many-lines -# pylint: disable=too-many-statements - -from azure.cli.core.commands.parameters import ( - tags_type, - get_enum_type, - get_location_type -) -from azure.cli.core.commands.validators import get_default_location_from_resource_group -from knack.arguments import CLIArgumentType - - -def load_arguments(self, _): - storage_accounts_type = CLIArgumentType(help='Space-separated list of the destination storage account. It can be the name or resource ID of storage account.', arg_group='Storage Account', nargs='+') - staging_storage_account_type = CLIArgumentType(help='The name or ID of the destination storage account that can be used to copy the vhd for staging.', arg_group='Managed Disk') - resource_group_for_managed_disk_type = CLIArgumentType(help='The name or ID of the destination resource group where the Compute disks should be created.', arg_group='Managed Disk') - job_name_type = CLIArgumentType(options_list=['--name', '-n'], help='The name of the job resource within the specified resource group. 
job names must be between 3 and 24 characters in length and use any alphanumeric and underscore only') - - with self.argument_context('databox job create') as c: - c.argument('job_name', job_name_type) - c.argument('location', arg_type=get_location_type(self.cli_ctx), default=None, - validator=get_default_location_from_resource_group) - c.argument('tags', tags_type) - c.argument('sku', arg_type=get_enum_type(['DataBox', 'DataBoxDisk', 'DataBoxHeavy']), - help='The sku type of DataBox.') - c.argument('expected_data_size', type=int, help='The expected size of the data which needs to be transferred in this job, in terabytes.The maximum usable capacity is up to 35 TB. This is only needed when sku is DataBoxDisk.') - c.argument('contact_name', help='Contact name of the person.', arg_group='Contact Details') - c.argument('phone', help='Phone number of the contact person.', arg_group='Contact Details') - c.argument('mobile', help='Mobile number of the contact person.', arg_group='Contact Details') - c.argument('email_list', help='Space-separated list of Email addresses to be notified about job progress.', arg_group='Contact Details', nargs='+') - c.argument('street_address1', help='Street Address line 1.', arg_group='Shipping Address') - c.argument('street_address2', help='Street Address line 2.', arg_group='Shipping Address') - c.argument('street_address3', help='Street Address line 3.', arg_group='Shipping Address') - c.argument('city', help='Name of the City.', arg_group='Shipping Address') - c.argument('state_or_province', help='Name of the State or Province.', arg_group='Shipping Address') - c.argument('country', help='Name of the Country. Ex: US', arg_group='Shipping Address') - c.argument('postal_code', help='Postal code.', arg_group='Shipping Address') - c.argument('company_name', help='Name of the company.', arg_group='Shipping Address') - c.extra('storage_accounts', arg_type=storage_accounts_type) - c.extra('staging_storage_account', arg_type=staging_storage_account_type) - c.extra('resource_group_for_managed_disk', arg_type=resource_group_for_managed_disk_type) - c.ignore('destination_account_details') - - with self.argument_context('databox job update') as c: - c.argument('job_name', job_name_type) - c.argument('contact_name', help='Contact name of the person.', arg_group='Contact Details') - c.argument('phone', help='Phone number of the contact person.', arg_group='Contact Details') - c.argument('mobile', help='Mobile number of the contact person.', arg_group='Contact Details') - c.argument('email_list', help='List of Email addresses to be notified about job progress.', arg_group='Contact Details', nargs='+') - c.argument('street_address1', help='Street Address line 1.', arg_group='Shipping Address') - c.argument('street_address2', help='Street Address line 2.', arg_group='Shipping Address') - c.argument('street_address3', help='Street Address line 3.', arg_group='Shipping Address') - c.argument('city', help='Name of the City.', arg_group='Shipping Address') - c.argument('state_or_province', help='Name of the State or Province.', arg_group='Shipping Address') - c.argument('country', help='Name of the Country. 
Ex: US', arg_group='Shipping Address') - c.argument('postal_code', help='Postal code.', arg_group='Shipping Address') - c.argument('company_name', help='Name of the company.', arg_group='Shipping Address') - - with self.argument_context('databox job delete') as c: - c.argument('job_name', job_name_type) - - with self.argument_context('databox job show') as c: - c.argument('job_name', job_name_type) - - with self.argument_context('databox job cancel') as c: - c.argument('job_name', job_name_type) - c.argument('reason', help='Reason for cancellation.') - - with self.argument_context('databox job list-credentials') as c: - c.argument('job_name', job_name_type) diff --git a/src/databox/azext_databox/_validators.py b/src/databox/azext_databox/_validators.py deleted file mode 100644 index a4339304141..00000000000 --- a/src/databox/azext_databox/_validators.py +++ /dev/null @@ -1,85 +0,0 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- - -# pylint: disable=line-too-long -from azure.cli.core.commands.client_factory import get_subscription_id -from msrestazure.tools import resource_id - - -def validate_create_input_parameters(cmd, namespace): - _parse_storage_account_details(cmd, namespace) - _parse_managed_disk_details(cmd, namespace) - _validate_expected_data_size_for_databoxdisk(namespace) - _validate_destination_account_details(namespace) - - -def _parse_storage_account_details(cmd, namespace): - """Parse storage account details for destination.""" - from msrestazure.tools import is_valid_resource_id - - if not namespace.destination_account_details: - namespace.destination_account_details = [] - - if namespace.storage_accounts: - for storage_account in namespace.storage_accounts: - if storage_account and not is_valid_resource_id(storage_account): - storage_account = resource_id( - subscription=get_subscription_id(cmd.cli_ctx), - resource_group=namespace.resource_group_name, - namespace='Microsoft.Storage', - type='storageAccounts', - name=storage_account - ) - - if storage_account: - storage_account_details = {'storage_account_id': storage_account, - 'data_destination_type': 'StorageAccount'} - namespace.destination_account_details.append(storage_account_details) - - del namespace.storage_accounts - - -def _parse_managed_disk_details(cmd, namespace): - """Parse managed disk details for destination.""" - from msrestazure.tools import is_valid_resource_id - - if not namespace.destination_account_details: - namespace.destination_account_details = [] - - subscription = get_subscription_id(cmd.cli_ctx) - if namespace.staging_storage_account and not is_valid_resource_id(namespace.staging_storage_account): - namespace.staging_storage_account = resource_id( - subscription=subscription, - resource_group=namespace.resource_group_name, - namespace='Microsoft.Storage', - type='storageAccounts', - name=namespace.staging_storage_account - ) - - if namespace.resource_group_for_managed_disk and not is_valid_resource_id( - namespace.resource_group_for_managed_disk): - namespace.resource_group_for_managed_disk = '/subscriptions/' + subscription + '/resourceGroups/' + namespace.resource_group_for_managed_disk - - if namespace.staging_storage_account and namespace.resource_group_for_managed_disk: - 
managed_disk_details = {'staging_storage_account_id': namespace.staging_storage_account, - 'resource_group_id': namespace.resource_group_for_managed_disk, - 'data_destination_type': 'ManagedDisk'} - namespace.destination_account_details.append(managed_disk_details) - - del namespace.staging_storage_account - del namespace.resource_group_for_managed_disk - - -def _validate_expected_data_size_for_databoxdisk(namespace): - if namespace.sku == 'DataBoxDisk' and not namespace.expected_data_size: - raise ValueError( - "You must provide '--expected-data-size' when the 'sku' is 'DataBoxDisk'.") - - -def _validate_destination_account_details(namespace): - if not namespace.destination_account_details: - raise ValueError( - "You must provide at least one '--storage-account' or the combination of '--staging-storage-account' and " - "'--resource-group-for-managed-disk'") diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/__init__.py b/src/databox/azext_databox/action.py similarity index 60% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/__init__.py rename to src/databox/azext_databox/action.py index ae972ed54f8..d95d53bf711 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/__init__.py +++ b/src/databox/azext_databox/action.py @@ -1,19 +1,17 @@ -# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# # Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. # -------------------------------------------------------------------------- +# pylint: disable=wildcard-import +# pylint: disable=unused-wildcard-import -from ._data_box_management_client import DataBoxManagementClient -from ._version import VERSION - -__version__ = VERSION -__all__ = ['DataBoxManagementClient'] - +from .generated.action import * # noqa: F403 try: - from ._patch import patch_sdk # type: ignore - patch_sdk() + from .manual.action import * # noqa: F403 except ImportError: pass diff --git a/src/databox/azext_databox/azext_metadata.json b/src/databox/azext_databox/azext_metadata.json index 587a1ed232f..cfc30c747c7 100644 --- a/src/databox/azext_databox/azext_metadata.json +++ b/src/databox/azext_databox/azext_metadata.json @@ -1,3 +1,4 @@ { - "azext.minCliCoreVersion": "2.3.1" + "azext.isExperimental": true, + "azext.minCliCoreVersion": "2.15.0" } \ No newline at end of file diff --git a/src/databox/azext_databox/commands.py b/src/databox/azext_databox/commands.py deleted file mode 100644 index a175550e86d..00000000000 --- a/src/databox/azext_databox/commands.py +++ /dev/null @@ -1,27 +0,0 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. 
-# -------------------------------------------------------------------------------------------- - -# pylint: disable=line-too-long -# pylint: disable=too-many-lines -# pylint: disable=too-many-statements -# pylint: disable=too-many-locals -from azext_databox._validators import validate_create_input_parameters -from azure.cli.core.commands import CliCommandType - - -def load_command_table(self, _): - - from azext_databox._client_factory import cf_jobs - databox_jobs = CliCommandType( - operations_tmpl='azext_databox.vendored_sdks.databox.operations._jobs_operations#JobsOperations.{}', - client_factory=cf_jobs) - with self.command_group('databox job', databox_jobs, client_factory=cf_jobs) as g: - g.custom_command('create', 'create_databox_job', validator=validate_create_input_parameters) - g.custom_command('update', 'update_databox_job') - g.custom_command('delete', 'delete_databox_job', confirmation=True) - g.custom_show_command('show', 'get_databox_job') - g.custom_command('list', 'list_databox_job') - g.custom_command('cancel', 'cancel_databox_job', confirmation=True) - g.custom_command('list-credentials', 'list_credentials_databox_job') diff --git a/src/databox/azext_databox/custom.py b/src/databox/azext_databox/custom.py index 6c84c133626..dbe9d5f9742 100644 --- a/src/databox/azext_databox/custom.py +++ b/src/databox/azext_databox/custom.py @@ -1,135 +1,17 @@ -# -------------------------------------------------------------------------------------------- +# -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- -# pylint: disable=line-too-long -# pylint: disable=too-many-statements -# pylint: disable=too-many-lines -# pylint: disable=too-many-locals -# pylint: disable=unused-argument -# pylint: disable=too-many-branches - - -def create_databox_job(client, - resource_group_name, - job_name, - location, - sku, - contact_name, - phone, - city, - email_list, - street_address1, - postal_code, - country, - state_or_province, - destination_account_details, - expected_data_size=None, - tags=None, - mobile=None, - street_address2=None, - street_address3=None, - company_name=None,): - body = {} - body['location'] = location # str - body['tags'] = tags # dictionary - body.setdefault('sku', {})['name'] = sku # str - body.setdefault('details', {})['job_details_type'] = sku - body.setdefault('details', {})['expected_data_size_in_terabytes'] = expected_data_size - body.setdefault('details', {}).setdefault('contact_details', {})['contact_name'] = contact_name # str - body.setdefault('details', {}).setdefault('contact_details', {})['phone'] = phone # str - body.setdefault('details', {}).setdefault('contact_details', {})['mobile'] = mobile # str - body.setdefault('details', {}).setdefault('contact_details', {})['email_list'] = email_list - body.setdefault('details', {}).setdefault('shipping_address', {})['street_address1'] = street_address1 # str - body.setdefault('details', {}).setdefault('shipping_address', {})['street_address2'] = street_address2 # str - body.setdefault('details', {}).setdefault('shipping_address', {})['street_address3'] = street_address3 # str - body.setdefault('details', {}).setdefault('shipping_address', {})['city'] = city # str - body.setdefault('details', {}).setdefault('shipping_address', {})['state_or_province'] 
= state_or_province # str - body.setdefault('details', {}).setdefault('shipping_address', {})['country'] = country # str - body.setdefault('details', {}).setdefault('shipping_address', {})['postal_code'] = postal_code # str - body.setdefault('details', {}).setdefault('shipping_address', {})['company_name'] = company_name # str - - body.setdefault('details', {})['destination_account_details'] = destination_account_details - - return client.begin_create(resource_group_name=resource_group_name, job_name=job_name, job_resource=body) - - -def update_databox_job(client, - resource_group_name, - job_name, - contact_name=None, - phone=None, - email_list=None, - street_address1=None, - postal_code=None, - country=None, - mobile=None, - city=None, - street_address2=None, - street_address3=None, - state_or_province=None, - company_name=None): - job_resource = get_databox_job(client, resource_group_name, job_name) - job_details = job_resource.details - contact_details = job_details.contact_details - shipping_address = job_details.shipping_address - - body = {} - body.setdefault('details', {}).setdefault('contact_details', {})[ - 'contact_name'] = contact_details.contact_name if contact_name is None else contact_name # str - body.setdefault('details', {}).setdefault('contact_details', {})[ - 'phone'] = contact_details.phone if phone is None else phone # str - body.setdefault('details', {}).setdefault('contact_details', {})[ - 'mobile'] = contact_details.mobile if mobile is None else mobile # str - body.setdefault('details', {}).setdefault('contact_details', {})[ - 'email_list'] = contact_details.email_list if email_list is None else email_list - body.setdefault('details', {}).setdefault('shipping_address', {})[ - 'street_address1'] = shipping_address.street_address1 if street_address1 is None else street_address1 # str - body.setdefault('details', {}).setdefault('shipping_address', {})[ - 'street_address2'] = shipping_address.street_address2 if street_address2 is None else street_address2 # str - body.setdefault('details', {}).setdefault('shipping_address', {})[ - 'street_address3'] = shipping_address.street_address3 if street_address3 is None else street_address3 # str - body.setdefault('details', {}).setdefault('shipping_address', {})[ - 'city'] = shipping_address.city if city is None else city # str - body.setdefault('details', {}).setdefault('shipping_address', {})[ - 'state_or_province'] = shipping_address.state_or_province if state_or_province is None else state_or_province # str - body.setdefault('details', {}).setdefault('shipping_address', {})[ - 'country'] = shipping_address.country if country is None else country # str - body.setdefault('details', {}).setdefault('shipping_address', {})[ - 'postal_code'] = shipping_address.postal_code if postal_code is None else postal_code # str - body.setdefault('details', {}).setdefault('shipping_address', {})[ - 'company_name'] = shipping_address.company_name if company_name is None else company_name # str - - return client.begin_update(resource_group_name=resource_group_name, job_name=job_name, job_resource_update_parameter=body) - - -def delete_databox_job(client, - resource_group_name, - job_name): - return client.begin_delete(resource_group_name=resource_group_name, job_name=job_name) - - -def get_databox_job(client, - resource_group_name, - job_name): - return client.get(resource_group_name=resource_group_name, job_name=job_name, expand='details') - - -def list_databox_job(client, - resource_group_name=None): - if resource_group_name is not None: - 
return client.list_by_resource_group(resource_group_name=resource_group_name) - return client.list() - - -def cancel_databox_job(client, - resource_group_name, - job_name, - reason): - return client.cancel(resource_group_name=resource_group_name, job_name=job_name, cancellation_reason={'reason': reason}) - - -def list_credentials_databox_job(client, - resource_group_name, - job_name): - return client.list_credentials(resource_group_name=resource_group_name, job_name=job_name) +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wildcard-import +# pylint: disable=unused-wildcard-import + +from .generated.custom import * # noqa: F403 +try: + from .manual.custom import * # noqa: F403 +except ImportError: + pass diff --git a/src/databox/azext_databox/vendored_sdks/databox/_version.py b/src/databox/azext_databox/generated/__init__.py similarity index 77% rename from src/databox/azext_databox/vendored_sdks/databox/_version.py rename to src/databox/azext_databox/generated/__init__.py index c47f66669f1..c9cfdc73e77 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/_version.py +++ b/src/databox/azext_databox/generated/__init__.py @@ -1,9 +1,12 @@ # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# # Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. # -------------------------------------------------------------------------- -VERSION = "1.0.0" +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/src/databox/azext_databox/generated/_client_factory.py b/src/databox/azext_databox/generated/_client_factory.py new file mode 100644 index 00000000000..58b4176b5b9 --- /dev/null +++ b/src/databox/azext_databox/generated/_client_factory.py @@ -0,0 +1,24 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + + +def cf_databox_cl(cli_ctx, *_): + from azure.cli.core.commands.client_factory import get_mgmt_service_client + from azext_databox.vendored_sdks.databox import DataBoxManagementClient + return get_mgmt_service_client(cli_ctx, + DataBoxManagementClient) + + +def cf_job(cli_ctx, *_): + return cf_databox_cl(cli_ctx).jobs + + +def cf_service(cli_ctx, *_): + return cf_databox_cl(cli_ctx).service diff --git a/src/databox/azext_databox/generated/_help.py b/src/databox/azext_databox/generated/_help.py new file mode 100644 index 00000000000..3ba73c6f7d9 --- /dev/null +++ b/src/databox/azext_databox/generated/_help.py @@ -0,0 +1,484 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=too-many-lines + +from knack.help_files import helps + + +helps['databox job'] = """ + type: group + short-summary: Manage job with databox +""" + +helps['databox job list'] = """ + type: command + short-summary: "Lists all the jobs available under the given resource group. And Lists all the jobs available \ +under the subscription." + examples: + - name: JobsListByResourceGroup + text: |- + az databox job list --resource-group "SdkRg5154" + - name: JobsList + text: |- + az databox job list +""" + +helps['databox job show'] = """ + type: command + short-summary: "Gets information about the specified job." + examples: + - name: JobsGet + text: |- + az databox job show --expand "details" --name "SdkJob952" --resource-group "SdkRg5154" + - name: JobsGetCmk + text: |- + az databox job show --expand "details" --name "SdkJob1735" --resource-group "SdkRg7937" + - name: JobsGetCopyStuck + text: |- + az databox job show --expand "details" --name "TJx-637505258985313014" --resource-group \ +"dmstestresource" + - name: JobsGetExport + text: |- + az databox job show --expand "details" --name "SdkJob6429" --resource-group "SdkRg8091" + - name: JobsGetWaitingForAction + text: |- + az databox job show --expand "details" --name "TJx-637505258985313014" --resource-group \ +"dmstestresource" +""" + +helps['databox job create'] = """ + type: command + short-summary: "Creates a new job with the specified parameters. Existing job cannot be updated with this API and \ +should instead be updated with the Update job API." + parameters: + - name: --sku + short-summary: "The sku type." + long-summary: | + Usage: --sku name=XX display-name=XX family=XX + + name: Required. The sku name. + display-name: The display name of the sku. + family: The sku family. 
+ examples: + - name: JobsCreate + text: |- + az databox job create --name "SdkJob952" --location "westus" --transfer-type "ImportToAzure" --details \ +"{\\"contactDetails\\":{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@microsoft.com\\"],\\"phone\ +\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"},\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountTyp\ +e\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourcegroups/\ +databoxbvt/providers/Microsoft.Storage/storageAccounts/databoxbvttestaccount\\"}}],\\"jobDetailsType\\":\\"DataBox\\",\ +\\"shippingAddress\\":{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\ +\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 \ +TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}" --sku name="DataBox" --resource-group "SdkRg5154" + - name: JobsCreateDevicePassword + text: |- + az databox job create --name "SdkJob9640" --location "westus" --transfer-type "ImportToAzure" --details \ +"{\\"contactDetails\\":{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@microsoft.com\\"],\\"phone\ +\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"},\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountTyp\ +e\\":\\"StorageAccount\\",\\"sharePassword\\":\\"\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8\ +ff7-4a25-95c7-ce9da541242f/resourceGroups/databoxbvt1/providers/Microsoft.Storage/storageAccounts/databoxbvttestaccount\ +2\\"}}],\\"devicePassword\\":\\"\\",\\"jobDetailsType\\":\\"DataBox\\",\\"shippingAddress\\":{\\"addres\ +sType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"po\ +stalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND \ +ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}" --sku name="DataBox" --resource-group "SdkRg7478" + - name: JobsCreateDoubleEncryption + text: |- + az databox job create --name "SdkJob6599" --location "westus" --transfer-type "ImportToAzure" --details \ +"{\\"contactDetails\\":{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@microsoft.com\\"],\\"phone\ +\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"},\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountTyp\ +e\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourcegroups/\ +databoxbvt/providers/Microsoft.Storage/storageAccounts/databoxbvttestaccount\\"}}],\\"jobDetailsType\\":\\"DataBox\\",\ +\\"preferences\\":{\\"encryptionPreferences\\":{\\"doubleEncryption\\":\\"Enabled\\"}},\\"shippingAddress\\":{\\"addres\ +sType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"po\ +stalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND \ +ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}" --sku name="DataBox" --resource-group "SdkRg608" + - name: JobsCreateExport + text: |- + az databox job create --name "SdkJob6429" --location "westus" --transfer-type "ExportFromAzure" \ +--details "{\\"contactDetails\\":{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@microsoft.com\\"]\ +,\\"phone\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"},\\"dataExportDetails\\":[{\\"accountDetails\\":{\\"dataA\ 
+ccountType\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resour\ +ceGroups/akvenkat/providers/Microsoft.Storage/storageAccounts/aaaaaa2\\"},\\"transferConfiguration\\":{\\"transferAllDe\ +tails\\":{\\"include\\":{\\"dataAccountType\\":\\"StorageAccount\\",\\"transferAllBlobs\\":true,\\"transferAllFiles\\":\ +true}},\\"transferConfigurationType\\":\\"TransferAll\\"}}],\\"jobDetailsType\\":\\"DataBox\\",\\"shippingAddress\\":{\ +\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\ +\\",\\"postalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND \ +ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}" --sku name="DataBox" --resource-group "SdkRg8091" + - name: JobsCreateWithUserAssignedIdentity + text: |- + az databox job create --name "SdkJob5337" --type "UserAssigned" --user-assigned-identities \ +"{\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/akvenkat/providers/Microsoft.ManagedIdentity/us\ +erAssignedIdentities/sdkIdentity\\":{}}" --location "westus" --transfer-type "ImportToAzure" --details \ +"{\\"contactDetails\\":{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@microsoft.com\\"],\\"phone\ +\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"},\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountTyp\ +e\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/\ +databoxbvt1/providers/Microsoft.Storage/storageAccounts/databoxbvttestaccount2\\"}}],\\"jobDetailsType\\":\\"DataBox\\"\ +,\\"shippingAddress\\":{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsof\ +t\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 \ +TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}" --sku name="DataBox" --resource-group "SdkRg7552" +""" + +helps['databox job update'] = """ + type: command + short-summary: "Updates the properties of an existing job." + parameters: + - name: --contact-details + short-summary: "Contact details for notification and shipping." + long-summary: | + Usage: --contact-details contact-name=XX phone=XX phone-extension=XX mobile=XX email-list=XX \ +notification-preference=XX + + contact-name: Required. Contact name of the person. + phone: Required. Phone number of the contact person. + phone-extension: Phone extension number of the contact person. + mobile: Mobile number of the contact person. + email-list: Required. List of Email-ids to be notified about job progress. + notification-preference: Notification preference for a job stage. + - name: --shipping-address + short-summary: "Shipping address of the customer." + long-summary: | + Usage: --shipping-address street-address1=XX street-address2=XX street-address3=XX city=XX \ +state-or-province=XX country=XX postal-code=XX zip-extended-code=XX company-name=XX address-type=XX + + street-address1: Required. Street Address line 1. + street-address2: Street Address line 2. + street-address3: Street Address line 3. + city: Name of the City. + state-or-province: Name of the State or Province. + country: Required. Name of the Country. + postal-code: Postal code. + zip-extended-code: Extended Zip Code. + company-name: Name of the company. + address-type: Type of address. + - name: --return-package-details + short-summary: "Return package details of job. 
This is applicable only for customer disk sku" + long-summary: | + Usage: --return-package-details carrier-account-number=XX carrier-name=XX tracking-id=XX + + carrier-account-number: Carrier Account Number of customer for customer disk. + carrier-name: Name of the carrier. + tracking-id: Tracking Id of shipment. + examples: + - name: JobsPatch + text: |- + az databox job update --name "SdkJob952" --contact-details contact-name="Update Job" \ +email-list="testing@microsoft.com" phone="1234567890" phone-extension="1234" --shipping-address \ +address-type="Commercial" city="San Francisco" company-name="Microsoft" country="US" postal-code="94107" \ +state-or-province="CA" street-address1="16 TOWNSEND ST" street-address2="Unit 1" --resource-group "SdkRg5154" + - name: JobsPatchCmk + text: |- + az databox job update --name "SdkJob1735" --key-encryption-key "{\\"kekType\\":\\"CustomerManaged\\",\\"\ +kekUrl\\":\\"https://sdkkeyvault.vault.azure.net/keys/SSDKEY/\\",\\"kekVaultResourceID\\":\\"/subscriptions/fa68082f-8f\ +f7-4a25-95c7-ce9da541242f/resourceGroups/akvenkat/providers/Microsoft.KeyVault/vaults/SDKKeyVault\\"}" \ +--resource-group "SdkRg7937" + - name: JobsPatchSystemAssignedToUserAssigned + text: |- + az databox job update --name "SdkJob2965" --type "SystemAssigned,UserAssigned" \ +--user-assigned-identities "{\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/akvenkat/providers/M\ +icrosoft.ManagedIdentity/userAssignedIdentities/sdkIdentity\\":{}}" --key-encryption-key \ +"{\\"identityProperties\\":{\\"type\\":\\"UserAssigned\\",\\"userAssigned\\":{\\"resourceId\\":\\"/subscriptions/fa6808\ +2f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/akvenkat/providers/Microsoft.ManagedIdentity/userAssignedIdentities/sdkId\ +entity\\"}},\\"kekType\\":\\"CustomerManaged\\",\\"kekUrl\\":\\"https://sdkkeyvault.vault.azure.net/keys/SSDKEY/\\",\\"\ +kekVaultResourceID\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/akvenkat/providers/Microsof\ +t.KeyVault/vaults/SDKKeyVault\\"}" --resource-group "SdkRg9765" +""" + +helps['databox job delete'] = """ + type: command + short-summary: "Deletes a job." + examples: + - name: JobsDelete + text: |- + az databox job delete --name "SdkJob952" --resource-group "SdkRg5154" +""" + +helps['databox job book-shipment-pick-up'] = """ + type: command + short-summary: "Book shipment pick up." + examples: + - name: BookShipmentPickupPost + text: |- + az databox job book-shipment-pick-up --name "TJ-636646322037905056" --resource-group "bvttoolrg6" \ +--end-time "2019-09-22T18:30:00Z" --shipment-location "Front desk" --start-time "2019-09-20T18:30:00Z" +""" + +helps['databox job cancel'] = """ + type: command + short-summary: "CancelJob." + examples: + - name: JobsCancelPost + text: |- + az databox job cancel --reason "CancelTest" --name "SdkJob952" --resource-group "SdkRg5154" +""" + +helps['databox job list-credentials'] = """ + type: command + short-summary: "This method gets the unencrypted secrets related to the job." + examples: + - name: JobsListCredentials + text: |- + az databox job list-credentials --name "TJ-636646322037905056" --resource-group "bvttoolrg6" +""" + +helps['databox job mark-device-shipped'] = """ + type: command + short-summary: "Request to mark devices for a given job as shipped." + parameters: + - name: --delivery-package-details + short-summary: "Delivery package details" + long-summary: | + Usage: --delivery-package-details carrier-name=XX tracking-id=XX + + carrier-name: Name of the carrier. 
+ tracking-id: Tracking Id of shipment. + examples: + - name: MarkDevicesShipped + text: |- + az databox job mark-device-shipped --name "SdkJob8367" --delivery-package-details carrier-name="DHL" \ +tracking-id="123456" --resource-group "SdkRg9836" +""" + +helps['databox job wait'] = """ + type: command + short-summary: Place the CLI in a waiting state until a condition of the databox job is met. + examples: + - name: Pause executing next line of CLI script until the databox job is successfully created. + text: |- + az databox job wait --expand "details" --name "TJx-637505258985313014" --resource-group \ +"dmstestresource" --created + - name: Pause executing next line of CLI script until the databox job is successfully updated. + text: |- + az databox job wait --expand "details" --name "TJx-637505258985313014" --resource-group \ +"dmstestresource" --updated + - name: Pause executing next line of CLI script until the databox job is successfully deleted. + text: |- + az databox job wait --expand "details" --name "TJx-637505258985313014" --resource-group \ +"dmstestresource" --deleted +""" + +helps['databox'] = """ + type: group + short-summary: Manage with databox +""" + +helps['databox mitigate'] = """ + type: command + short-summary: "Request to mitigate for a given job." + examples: + - name: Mitigate + text: |- + az databox mitigate --job-name "SdkJob8367" --customer-resolution-code "MoveToCleanUpDevice" \ +--resource-group "SdkRg9836" +""" + +helps['databox service'] = """ + type: group + short-summary: Manage service with databox +""" + +helps['databox service list-available-sku-by-resource-group'] = """ + type: command + short-summary: "This method provides the list of available skus for the given subscription, resource group and \ +location." + examples: + - name: AvailableSkusPost + text: |- + az databox service list-available-sku-by-resource-group --country "US" --available-sku-request-location \ +"westus" --transfer-type "ImportToAzure" --location "westus" --resource-group "bvttoolrg6" +""" + +helps['databox service region-configuration'] = """ + type: command + short-summary: "This API provides configuration details specific to given region/location at Subscription level." + parameters: + - name: --data-box-schedule-availability-request + short-summary: "Request body to get the availability for scheduling data box orders orders." + long-summary: | + Usage: --data-box-schedule-availability-request storage-location=XX sku-name=XX country=XX + + storage-location: Required. Location for data transfer. For locations check: \ +https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01 + sku-name: Required. Sku Name for which the order is to be scheduled. + country: Country in which storage location should be supported. + - name: --disk-schedule-availability-request + short-summary: "Request body to get the availability for scheduling disk orders." + long-summary: | + Usage: --disk-schedule-availability-request expected-data-size-in-tera-bytes=XX storage-location=XX \ +sku-name=XX country=XX + + expected-data-size-in-tera-bytes: Required. The expected size of the data, which needs to be transferred \ +in this job, in terabytes. + storage-location: Required. Location for data transfer. For locations check: \ +https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01 + sku-name: Required. Sku Name for which the order is to be scheduled. + country: Country in which storage location should be supported. 
+ - name: --heavy-schedule-availability-request + short-summary: "Request body to get the availability for scheduling heavy orders." + long-summary: | + Usage: --heavy-schedule-availability-request storage-location=XX sku-name=XX country=XX + + storage-location: Required. Location for data transfer. For locations check: \ +https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01 + sku-name: Required. Sku Name for which the order is to be scheduled. + country: Country in which storage location should be supported. + - name: --datacenter-address-request + short-summary: "Request body to get the datacenter address ." + long-summary: | + Usage: --datacenter-address-request storage-location=XX sku-name=XX + + storage-location: Required. Storage location. For locations check: https://management.azure.com/subscriptio\ +ns/SUBSCRIPTIONID/locations?api-version=2018-01-01 + sku-name: Required. Sku Name for which the data center address requested. + examples: + - name: RegionConfiguration + text: |- + az databox service region-configuration --location "westus" --schedule-availability-request \ +"{\\"skuName\\":\\"DataBox\\",\\"storageLocation\\":\\"westus\\"}" +""" + +helps['databox service region-configuration-by-resource-group'] = """ + type: command + short-summary: "This API provides configuration details specific to given region/location at Resource group \ +level." + parameters: + - name: --data-box-schedule-availability-request + short-summary: "Request body to get the availability for scheduling data box orders orders." + long-summary: | + Usage: --data-box-schedule-availability-request storage-location=XX sku-name=XX country=XX + + storage-location: Required. Location for data transfer. For locations check: \ +https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01 + sku-name: Required. Sku Name for which the order is to be scheduled. + country: Country in which storage location should be supported. + - name: --disk-schedule-availability-request + short-summary: "Request body to get the availability for scheduling disk orders." + long-summary: | + Usage: --disk-schedule-availability-request expected-data-size-in-tera-bytes=XX storage-location=XX \ +sku-name=XX country=XX + + expected-data-size-in-tera-bytes: Required. The expected size of the data, which needs to be transferred \ +in this job, in terabytes. + storage-location: Required. Location for data transfer. For locations check: \ +https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01 + sku-name: Required. Sku Name for which the order is to be scheduled. + country: Country in which storage location should be supported. + - name: --heavy-schedule-availability-request + short-summary: "Request body to get the availability for scheduling heavy orders." + long-summary: | + Usage: --heavy-schedule-availability-request storage-location=XX sku-name=XX country=XX + + storage-location: Required. Location for data transfer. For locations check: \ +https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01 + sku-name: Required. Sku Name for which the order is to be scheduled. + country: Country in which storage location should be supported. + - name: --datacenter-address-request + short-summary: "Request body to get the datacenter address ." + long-summary: | + Usage: --datacenter-address-request storage-location=XX sku-name=XX + + storage-location: Required. Storage location. 
For locations check: https://management.azure.com/subscriptio\ +ns/SUBSCRIPTIONID/locations?api-version=2018-01-01 + sku-name: Required. Sku Name for which the data center address requested. + examples: + - name: RegionConfigurationByResourceGroup + text: |- + az databox service region-configuration-by-resource-group --location "westus" \ +--schedule-availability-request "{\\"skuName\\":\\"DataBox\\",\\"storageLocation\\":\\"westus\\"}" --resource-group \ +"SdkRg4981" +""" + +helps['databox service validate-address'] = """ + type: command + short-summary: "[DEPRECATED NOTICE: This operation will soon be removed]. This method validates the customer \ +shipping address and provide alternate addresses if any." + parameters: + - name: --shipping-address + short-summary: "Shipping address of the customer." + long-summary: | + Usage: --shipping-address street-address1=XX street-address2=XX street-address3=XX city=XX \ +state-or-province=XX country=XX postal-code=XX zip-extended-code=XX company-name=XX address-type=XX + + street-address1: Required. Street Address line 1. + street-address2: Street Address line 2. + street-address3: Street Address line 3. + city: Name of the City. + state-or-province: Name of the State or Province. + country: Required. Name of the Country. + postal-code: Postal code. + zip-extended-code: Extended Zip Code. + company-name: Name of the company. + address-type: Type of address. + examples: + - name: ValidateAddressPost + text: |- + az databox service validate-address --location "westus" --device-type "DataBox" --shipping-address \ +address-type="Commercial" city="San Francisco" company-name="Microsoft" country="US" postal-code="94107" \ +state-or-province="CA" street-address1="16 TOWNSEND ST" street-address2="Unit 1" --validation-type "ValidateAddress" +""" + +helps['databox service validate-input'] = """ + type: command + short-summary: "This method does all necessary pre-job creation validation under subscription." + parameters: + - name: --create-job-validations + short-summary: "It does all pre-job creation validations." + long-summary: | + Usage: --create-job-validations individual-request-details=XX + + individual-request-details: Required. List of request details contain validationType and its request as \ +key and value respectively. 
+ examples: + - name: ValidateInputs + text: |- + az databox service validate-input --location "westus" --validation-request \ +"{\\"individualRequestDetails\\":[{\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountType\\":\\"StorageAcco\ +unt\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourcegroups/databoxbvt/provider\ +s/Microsoft.Storage/storageAccounts/databoxbvttestaccount\\"}}],\\"deviceType\\":\\"DataBox\\",\\"transferType\\":\\"Im\ +portToAzure\\",\\"validationType\\":\\"ValidateDataTransferDetails\\"},{\\"deviceType\\":\\"DataBox\\",\\"shippingAddre\ +ss\\":{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\\ +":\\"US\\",\\"postalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND \ +ST\\",\\"streetAddress2\\":\\"Unit 1\\"},\\"transportPreferences\\":{\\"preferredShipmentType\\":\\"MicrosoftManaged\\"\ +},\\"validationType\\":\\"ValidateAddress\\"},{\\"validationType\\":\\"ValidateSubscriptionIsAllowedToCreateJob\\"},{\\\ +"country\\":\\"US\\",\\"deviceType\\":\\"DataBox\\",\\"location\\":\\"westus\\",\\"transferType\\":\\"ImportToAzure\\",\ +\\"validationType\\":\\"ValidateSkuAvailability\\"},{\\"deviceType\\":\\"DataBox\\",\\"validationType\\":\\"ValidateCre\ +ateOrderLimit\\"},{\\"deviceType\\":\\"DataBox\\",\\"preference\\":{\\"transportPreferences\\":{\\"preferredShipmentTyp\ +e\\":\\"MicrosoftManaged\\"}},\\"validationType\\":\\"ValidatePreferences\\"}],\\"validationCategory\\":\\"JobCreationV\ +alidation\\"}" +""" + +helps['databox service validate-input-by-resource-group'] = """ + type: command + short-summary: "This method does all necessary pre-job creation validation under resource group." + parameters: + - name: --create-job-validations + short-summary: "It does all pre-job creation validations." + long-summary: | + Usage: --create-job-validations individual-request-details=XX + + individual-request-details: Required. List of request details contain validationType and its request as \ +key and value respectively. 
+ examples: + - name: ValidateInputsByResourceGroup + text: |- + az databox service validate-input-by-resource-group --location "westus" --resource-group "SdkRg6861" \ +--validation-request "{\\"individualRequestDetails\\":[{\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountT\ +ype\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourcegroup\ +s/databoxbvt/providers/Microsoft.Storage/storageAccounts/databoxbvttestaccount\\"}}],\\"deviceType\\":\\"DataBox\\",\\"\ +transferType\\":\\"ImportToAzure\\",\\"validationType\\":\\"ValidateDataTransferDetails\\"},{\\"deviceType\\":\\"DataBo\ +x\\",\\"shippingAddress\\":{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Micr\ +osoft\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 \ +TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit 1\\"},\\"transportPreferences\\":{\\"preferredShipmentType\\":\\"MicrosoftM\ +anaged\\"},\\"validationType\\":\\"ValidateAddress\\"},{\\"validationType\\":\\"ValidateSubscriptionIsAllowedToCreateJo\ +b\\"},{\\"country\\":\\"US\\",\\"deviceType\\":\\"DataBox\\",\\"location\\":\\"westus\\",\\"transferType\\":\\"ImportTo\ +Azure\\",\\"validationType\\":\\"ValidateSkuAvailability\\"},{\\"deviceType\\":\\"DataBox\\",\\"validationType\\":\\"Va\ +lidateCreateOrderLimit\\"},{\\"deviceType\\":\\"DataBox\\",\\"preference\\":{\\"transportPreferences\\":{\\"preferredSh\ +ipmentType\\":\\"MicrosoftManaged\\"}},\\"validationType\\":\\"ValidatePreferences\\"}],\\"validationCategory\\":\\"Job\ +CreationValidation\\"}" +""" diff --git a/src/databox/azext_databox/generated/_params.py b/src/databox/azext_databox/generated/_params.py new file mode 100644 index 00000000000..5100f303bcc --- /dev/null +++ b/src/databox/azext_databox/generated/_params.py @@ -0,0 +1,219 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=too-many-lines +# pylint: disable=too-many-statements + +from azure.cli.core.commands.parameters import ( + tags_type, + get_enum_type, + resource_group_name_type, + get_location_type +) +from azure.cli.core.commands.validators import ( + get_default_location_from_resource_group, + validate_file_or_dict +) +from azext_databox.action import ( + AddSku, + AddContactDetails, + AddShippingAddress, + AddReturnPackageDetails, + AddDeliveryPackageDetails, + AddDataBoxScheduleAvailabilityRequest, + AddDiskScheduleAvailabilityRequest, + AddHeavyScheduleAvailabilityRequest, + AddDatacenterAddressRequest, + AddCreateJobValidations +) + + +def load_arguments(self, _): + + with self.argument_context('databox job list') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('skip_token', type=str, help='$skipToken is supported on Get list of jobs, which provides the next ' + 'page in the list of jobs.') + + with self.argument_context('databox job show') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('job_name', options_list=['--name', '-n', '--job-name'], type=str, help='The name of the job ' + 'Resource within the specified resource group. job names must be between 3 and 24 characters in ' + 'length and use any alphanumeric and underscore only', id_part='name') + c.argument('expand', type=str, help='$expand is supported on details parameter for job, which provides details ' + 'on the job stages.') + + with self.argument_context('databox job create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('job_name', options_list=['--name', '-n', '--job-name'], type=str, help='The name of the job ' + 'Resource within the specified resource group. job names must be between 3 and 24 characters in ' + 'length and use any alphanumeric and underscore only') + c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, + validator=get_default_location_from_resource_group) + c.argument('tags', tags_type) + c.argument('sku', action=AddSku, nargs='+', help='The sku type.') + c.argument('type_', options_list=['--type'], type=str, help='Identity type', arg_group='Identity') + c.argument('user_assigned_identities', type=validate_file_or_dict, help='User Assigned Identities Expected ' + 'value: json-string/@json-file.', arg_group='Identity') + c.argument('transfer_type', arg_type=get_enum_type(['ImportToAzure', 'ExportFromAzure']), help='Type of the ' + 'data transfer.') + c.argument('details', type=validate_file_or_dict, help='Details of a job run. This field will only be sent for ' + 'expand details filter. Expected value: json-string/@json-file.') + c.argument('delivery_type', arg_type=get_enum_type(['NonScheduled', 'Scheduled']), + help='Delivery type of Job.') + c.argument('scheduled_date_time', help='Scheduled date time.', arg_group='Delivery Info') + + with self.argument_context('databox job update') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('job_name', options_list=['--name', '-n', '--job-name'], type=str, help='The name of the job ' + 'Resource within the specified resource group. job names must be between 3 and 24 characters in ' + 'length and use any alphanumeric and underscore only', id_part='name') + c.argument('if_match', type=str, help='Defines the If-Match condition. 
The patch will be performed only if the ' + 'ETag of the job on the server matches this value.') + c.argument('tags', tags_type) + c.argument('contact_details', action=AddContactDetails, nargs='+', help='Contact details for notification and ' + 'shipping.', arg_group='Details') + c.argument('shipping_address', action=AddShippingAddress, nargs='+', help='Shipping address of the customer.', + arg_group='Details') + c.argument('key_encryption_key', type=validate_file_or_dict, help='Key encryption key for the job. Expected ' + 'value: json-string/@json-file.', arg_group='Details') + c.argument('return_package_details', action=AddReturnPackageDetails, nargs='+', help='Return package details ' + 'of job. This is applicable only for customer disk sku', arg_group='Details') + c.argument('type_', options_list=['--type'], type=str, help='Identity type', arg_group='Identity') + c.argument('user_assigned_identities', type=validate_file_or_dict, help='User Assigned Identities Expected ' + 'value: json-string/@json-file.', arg_group='Identity') + + with self.argument_context('databox job delete') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('job_name', options_list=['--name', '-n', '--job-name'], type=str, help='The name of the job ' + 'Resource within the specified resource group. job names must be between 3 and 24 characters in ' + 'length and use any alphanumeric and underscore only', id_part='name') + + with self.argument_context('databox job book-shipment-pick-up') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('job_name', options_list=['--name', '-n', '--job-name'], type=str, help='The name of the job ' + 'Resource within the specified resource group. job names must be between 3 and 24 characters in ' + 'length and use any alphanumeric and underscore only', id_part='name') + c.argument('start_time', help='Minimum date after which the pick up should commence, this must be in local ' + 'time of pick up area.') + c.argument('end_time', help='Maximum date before which the pick up should commence, this must be in local time ' + 'of pick up area.') + c.argument('shipment_location', type=str, help='Shipment Location in the pickup place. Eg.front desk') + + with self.argument_context('databox job cancel') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('job_name', options_list=['--name', '-n', '--job-name'], type=str, help='The name of the job ' + 'Resource within the specified resource group. job names must be between 3 and 24 characters in ' + 'length and use any alphanumeric and underscore only', id_part='name') + c.argument('reason', type=str, help='Reason for cancellation.') + + with self.argument_context('databox job list-credentials') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('job_name', options_list=['--name', '-n', '--job-name'], type=str, help='The name of the job ' + 'Resource within the specified resource group. job names must be between 3 and 24 characters in ' + 'length and use any alphanumeric and underscore only') + + with self.argument_context('databox job mark-device-shipped') as c: + c.argument('job_name', options_list=['--name', '-n', '--job-name'], type=str, help='The name of the job ' + 'Resource within the specified resource group. 
job names must be between 3 and 24 characters in ' + 'length and use any alphanumeric and underscore only', id_part='name') + c.argument('resource_group_name', resource_group_name_type) + c.argument('delivery_package_details', action=AddDeliveryPackageDetails, nargs='+', help='Delivery package ' + 'details') + + with self.argument_context('databox job wait') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('job_name', options_list=['--name', '-n', '--job-name'], type=str, help='The name of the job ' + 'Resource within the specified resource group. job names must be between 3 and 24 characters in ' + 'length and use any alphanumeric and underscore only', id_part='name') + c.argument('expand', type=str, help='$expand is supported on details parameter for job, which provides details ' + 'on the job stages.') + + with self.argument_context('databox mitigate') as c: + c.argument('job_name', type=str, help='The name of the job Resource within the specified resource group. job ' + 'names must be between 3 and 24 characters in length and use any alphanumeric and underscore only', + id_part='name') + c.argument('resource_group_name', resource_group_name_type) + c.argument('customer_resolution_code', arg_type=get_enum_type(['None', 'MoveToCleanUpDevice', 'Resume']), + help='Resolution code for the job') + + with self.argument_context('databox service list-available-sku-by-resource-group') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, + validator=get_default_location_from_resource_group) + c.argument('transfer_type', arg_type=get_enum_type(['ImportToAzure', 'ExportFromAzure']), help='Type of the ' + 'transfer.') + c.argument('country', type=str, help='ISO country code. Country for hardware shipment. For codes check: ' + 'https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements') + c.argument('available_sku_request_location', type=str, help='Location for data transfer. 
For locations check: ' + 'https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01') + c.argument('sku_names', nargs='+', help='Sku Names to filter for available skus') + + with self.argument_context('databox service region-configuration') as c: + c.argument('location', arg_type=get_location_type(self.cli_ctx), id_part='name') + c.argument('data_box_schedule_availability_request', action=AddDataBoxScheduleAvailabilityRequest, nargs='+', + help='Request body to get the availability for scheduling data box orders orders.', + arg_group='ScheduleAvailabilityRequest') + c.argument('disk_schedule_availability_request', action=AddDiskScheduleAvailabilityRequest, nargs='+', + help='Request body to get the availability for scheduling disk orders.', + arg_group='ScheduleAvailabilityRequest') + c.argument('heavy_schedule_availability_request', action=AddHeavyScheduleAvailabilityRequest, nargs='+', + help='Request body to get the availability for scheduling heavy orders.', + arg_group='ScheduleAvailabilityRequest') + c.argument('datacenter_address_request', action=AddDatacenterAddressRequest, nargs='+', help='Request body to ' + 'get the datacenter address .') + c.argument('sku_name', arg_type=get_enum_type(['DataBox', 'DataBoxDisk', 'DataBoxHeavy', + 'DataBoxCustomerDisk']), help='Type of the device.', + arg_group='Transport Availability Request') + + with self.argument_context('databox service region-configuration-by-resource-group') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, + validator=get_default_location_from_resource_group, id_part='name') + c.argument('data_box_schedule_availability_request', action=AddDataBoxScheduleAvailabilityRequest, nargs='+', + help='Request body to get the availability for scheduling data box orders orders.', + arg_group='ScheduleAvailabilityRequest') + c.argument('disk_schedule_availability_request', action=AddDiskScheduleAvailabilityRequest, nargs='+', + help='Request body to get the availability for scheduling disk orders.', + arg_group='ScheduleAvailabilityRequest') + c.argument('heavy_schedule_availability_request', action=AddHeavyScheduleAvailabilityRequest, nargs='+', + help='Request body to get the availability for scheduling heavy orders.', + arg_group='ScheduleAvailabilityRequest') + c.argument('datacenter_address_request', action=AddDatacenterAddressRequest, nargs='+', help='Request body to ' + 'get the datacenter address .') + c.argument('sku_name', arg_type=get_enum_type(['DataBox', 'DataBoxDisk', 'DataBoxHeavy', + 'DataBoxCustomerDisk']), help='Type of the device.', + arg_group='Transport Availability Request') + + with self.argument_context('databox service validate-address') as c: + c.argument('location', arg_type=get_location_type(self.cli_ctx), id_part='name') + c.argument('validation_type', arg_type=get_enum_type(['ValidateAddress', 'ValidateSubscriptionIsAllowedToCreate' + 'Job', 'ValidatePreferences', 'ValidateCreateOrderLimit', + 'ValidateSkuAvailability', + 'ValidateDataTransferDetails']), help='Identifies the ' + 'type of validation request.') + c.argument('shipping_address', action=AddShippingAddress, nargs='+', help='Shipping address of the customer.') + c.argument('device_type', arg_type=get_enum_type(['DataBox', 'DataBoxDisk', 'DataBoxHeavy', + 'DataBoxCustomerDisk']), help='Device type to be used for ' + 'the job.') + c.argument('preferred_shipment_type', arg_type=get_enum_type(['CustomerManaged', 
'MicrosoftManaged']), + help='Indicates Shipment Logistics type that the customer preferred.', arg_group='Transport ' + 'Preferences') + + with self.argument_context('databox service validate-input') as c: + c.argument('location', arg_type=get_location_type(self.cli_ctx), id_part='name') + c.argument('create_job_validations', action=AddCreateJobValidations, nargs='+', help='It does all pre-job ' + 'creation validations.', arg_group='ValidationRequest') + + with self.argument_context('databox service validate-input-by-resource-group') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, + validator=get_default_location_from_resource_group, id_part='name') + c.argument('create_job_validations', action=AddCreateJobValidations, nargs='+', help='It does all pre-job ' + 'creation validations.', arg_group='ValidationRequest') diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/_version.py b/src/databox/azext_databox/generated/_validators.py similarity index 82% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/_version.py rename to src/databox/azext_databox/generated/_validators.py index eae7c95b6fb..b33a44c1ebf 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/_version.py +++ b/src/databox/azext_databox/generated/_validators.py @@ -1,9 +1,9 @@ -# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# # Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. # -------------------------------------------------------------------------- - -VERSION = "0.1.0" diff --git a/src/databox/azext_databox/generated/action.py b/src/databox/azext_databox/generated/action.py new file mode 100644 index 00000000000..748bb47b001 --- /dev/null +++ b/src/databox/azext_databox/generated/action.py @@ -0,0 +1,320 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
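generated/action.py, added here, supplies the argparse Action classes (AddSku, AddContactDetails, AddShippingAddress, AddCreateJobValidations, and so on) that the parameter registrations above reference: each Action splits the space-separated key=value tokens of one option into the corresponding request dictionary and rejects unknown keys with a CLIError. A hedged, simplified illustration of that parsing, standalone and not the generated code itself:

from collections import defaultdict

def parse_key_value_tokens(values):
    # '--contact-details contact-name=... email-list=...' reaches the Action as a
    # list of 'key=value' strings; repeated keys accumulate into lists.
    properties = defaultdict(list)
    for token in values:
        key, value = token.split('=', 1)
        properties[key.lower()].append(value)
    return dict(properties)

print(parse_key_value_tokens(
    ['contact-name=Public SDK Test', 'email-list=a@example.com', 'email-list=b@example.com']))
# {'contact-name': ['Public SDK Test'], 'email-list': ['a@example.com', 'b@example.com']}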
+# -------------------------------------------------------------------------- +# pylint: disable=protected-access + +import argparse +from collections import defaultdict +from knack.util import CLIError + + +class AddSku(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.sku = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'name': + d['name'] = v[0] + elif kl == 'display-name': + d['display_name'] = v[0] + elif kl == 'family': + d['family'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter sku. All possible keys are: name, ' + 'display-name, family'.format(k)) + return d + + +class AddContactDetails(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.contact_details = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'contact-name': + d['contact_name'] = v[0] + elif kl == 'phone': + d['phone'] = v[0] + elif kl == 'phone-extension': + d['phone_extension'] = v[0] + elif kl == 'mobile': + d['mobile'] = v[0] + elif kl == 'email-list': + d['email_list'] = v + elif kl == 'notification-preference': + d['notification_preference'] = v + else: + raise CLIError('Unsupported Key {} is provided for parameter contact_details. All possible keys are: ' + 'contact-name, phone, phone-extension, mobile, email-list, notification-preference'. + format(k)) + return d + + +class AddShippingAddress(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.shipping_address = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + d['address_type'] = "None" + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'street-address1': + d['street_address1'] = v[0] + elif kl == 'street-address2': + d['street_address2'] = v[0] + elif kl == 'street-address3': + d['street_address3'] = v[0] + elif kl == 'city': + d['city'] = v[0] + elif kl == 'state-or-province': + d['state_or_province'] = v[0] + elif kl == 'country': + d['country'] = v[0] + elif kl == 'postal-code': + d['postal_code'] = v[0] + elif kl == 'zip-extended-code': + d['zip_extended_code'] = v[0] + elif kl == 'company-name': + d['company_name'] = v[0] + elif kl == 'address-type': + d['address_type'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter shipping_address. 
All possible keys are: ' + 'street-address1, street-address2, street-address3, city, state-or-province, country, ' + 'postal-code, zip-extended-code, company-name, address-type'.format(k)) + return d + + +class AddReturnPackageDetails(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.return_package_details = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'carrier-account-number': + d['carrier_account_number'] = v[0] + elif kl == 'carrier-name': + d['carrier_name'] = v[0] + elif kl == 'tracking-id': + d['tracking_id'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter return_package_details. All possible keys ' + 'are: carrier-account-number, carrier-name, tracking-id'.format(k)) + return d + + +class AddDeliveryPackageDetails(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.delivery_package_details = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'carrier-name': + d['carrier_name'] = v[0] + elif kl == 'tracking-id': + d['tracking_id'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter delivery_package_details. All possible ' + 'keys are: carrier-name, tracking-id'.format(k)) + return d + + +class AddDataBoxScheduleAvailabilityRequest(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.data_box_schedule_availability_request = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'storage-location': + d['storage_location'] = v[0] + elif kl == 'country': + d['country'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter data_box_schedule_availability_request. 
' + 'All possible keys are: storage-location, country'.format(k)) + d['sku_name'] = 'DataBox' + return d + + +class AddDiskScheduleAvailabilityRequest(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.disk_schedule_availability_request = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'expected-data-size-in-tera-bytes': + d['expected_data_size_in_tera_bytes'] = v[0] + elif kl == 'storage-location': + d['storage_location'] = v[0] + elif kl == 'country': + d['country'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter disk_schedule_availability_request. All ' + 'possible keys are: expected-data-size-in-tera-bytes, storage-location, country'.format(k)) + d['sku_name'] = 'DataBoxDisk' + return d + + +class AddHeavyScheduleAvailabilityRequest(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.heavy_schedule_availability_request = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'storage-location': + d['storage_location'] = v[0] + elif kl == 'country': + d['country'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter heavy_schedule_availability_request. All ' + 'possible keys are: storage-location, country'.format(k)) + d['sku_name'] = 'DataBoxHeavy' + return d + + +class AddDatacenterAddressRequest(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.datacenter_address_request = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'storage-location': + d['storage_location'] = v[0] + elif kl == 'sku-name': + d['sku_name'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter datacenter_address_request. 
All possible ' + 'keys are: storage-location, sku-name'.format(k)) + return d + + +class AddCreateJobValidations(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.create_job_validations = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + d['validation_category'] = "JobCreationValidation" + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'individual-request-details': + d['individual_request_details'] = v + else: + raise CLIError('Unsupported Key {} is provided for parameter create_job_validations. All possible keys ' + 'are: individual-request-details'.format(k)) + d['validation_category'] = 'JobCreationValidation' + return d diff --git a/src/databox/azext_databox/generated/commands.py b/src/databox/azext_databox/generated/commands.py new file mode 100644 index 00000000000..b30165b11b7 --- /dev/null +++ b/src/databox/azext_databox/generated/commands.py @@ -0,0 +1,54 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=too-many-statements +# pylint: disable=too-many-locals + +from azure.cli.core.commands import CliCommandType + + +def load_command_table(self, _): + + from azext_databox.generated._client_factory import cf_job + databox_job = CliCommandType( + operations_tmpl='azext_databox.vendored_sdks.databox.operations._jobs_operations#JobsOperations.{}', + client_factory=cf_job) + with self.command_group('databox job', databox_job, client_factory=cf_job) as g: + g.custom_command('list', 'databox_job_list') + g.custom_show_command('show', 'databox_job_show') + g.custom_command('create', 'databox_job_create', supports_no_wait=True) + g.custom_command('update', 'databox_job_update', supports_no_wait=True) + g.custom_command('delete', 'databox_job_delete', supports_no_wait=True, confirmation=True) + g.custom_command('book-shipment-pick-up', 'databox_job_book_shipment_pick_up') + g.custom_command('cancel', 'databox_job_cancel') + g.custom_command('list-credentials', 'databox_job_list_credentials') + g.custom_command('mark-device-shipped', 'databox_job_mark_device_shipped') + g.custom_wait_command('wait', 'databox_job_show') + + from azext_databox.generated._client_factory import cf_databox + databox_ = CliCommandType( + operations_tmpl='azext_databox.vendored_sdks.databox.operations._model_operations#DataBoxManagementClientOperat' + 'ionsMixin.{}', + client_factory=cf_databox) + with self.command_group('databox', databox_, client_factory=cf_databox, is_experimental=True) as g: + g.custom_command('mitigate', 'databox_mitigate') + + from azext_databox.generated._client_factory import cf_service + databox_service = CliCommandType( + operations_tmpl='azext_databox.vendored_sdks.databox.operations._service_operations#ServiceOperations.{}', + client_factory=cf_service) 
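+    # The 'databox service' group below exposes only custom commands; each name maps to
+    # the matching databox_service_* wrapper in generated/custom.py, which builds the
+    # request body from the flattened CLI options before calling ServiceOperations.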
+ with self.command_group('databox service', databox_service, client_factory=cf_service) as g: + g.custom_command('list-available-sku-by-resource-group', + 'databox_service_list_available_sku_by_resource_group') + g.custom_command('region-configuration', 'databox_service_region_configuration') + g.custom_command('region-configuration-by-resource-group', 'databox_service_region_configuration_by_resource_gr' + 'oup') + g.custom_command('validate-address', 'databox_service_validate_address') + g.custom_command('validate-input', 'databox_service_validate_input') + g.custom_command('validate-input-by-resource-group', 'databox_service_validate_input_by_resource_group') diff --git a/src/databox/azext_databox/generated/custom.py b/src/databox/azext_databox/generated/custom.py new file mode 100644 index 00000000000..d61d68f745a --- /dev/null +++ b/src/databox/azext_databox/generated/custom.py @@ -0,0 +1,285 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=too-many-lines + +from knack.util import CLIError +from azure.cli.core.util import sdk_no_wait + + +def databox_job_list(client, + resource_group_name=None, + skip_token=None): + if resource_group_name: + return client.list_by_resource_group(resource_group_name=resource_group_name, + skip_token=skip_token) + return client.list(skip_token=skip_token) + + +def databox_job_show(client, + resource_group_name, + job_name, + expand=None): + return client.get(resource_group_name=resource_group_name, + job_name=job_name, + expand=expand) + + +def databox_job_create(client, + resource_group_name, + job_name, + location, + sku, + transfer_type, + tags=None, + type_=None, + user_assigned_identities=None, + details=None, + delivery_type=None, + scheduled_date_time=None, + no_wait=False): + if type_ is None: + type_ = "None" + if delivery_type is None: + delivery_type = "NonScheduled" + job_resource = {} + job_resource['location'] = location + job_resource['tags'] = tags + job_resource['sku'] = sku + job_resource['identity'] = {} + job_resource['identity']['type'] = "None" if type_ is None else type_ + job_resource['identity']['user_assigned_identities'] = user_assigned_identities + job_resource['details'] = details + job_resource['delivery_type'] = "NonScheduled" if delivery_type is None else delivery_type + job_resource['delivery_info'] = {} + job_resource['delivery_info']['scheduled_date_time'] = scheduled_date_time + return sdk_no_wait(no_wait, + client.begin_create, + resource_group_name=resource_group_name, + job_name=job_name, + job_resource=job_resource) + + +def databox_job_update(client, + resource_group_name, + job_name, + if_match=None, + tags=None, + contact_details=None, + shipping_address=None, + key_encryption_key=None, + return_package_details=None, + type_=None, + user_assigned_identities=None, + no_wait=False): + if type_ is None: + type_ = "None" + job_resource_update_parameter = {} + job_resource_update_parameter['tags'] = tags + job_resource_update_parameter['details'] = {} + job_resource_update_parameter['details']['contact_details'] = contact_details + 
job_resource_update_parameter['details']['shipping_address'] = shipping_address + job_resource_update_parameter['details']['key_encryption_key'] = key_encryption_key + job_resource_update_parameter['details']['return_package_details'] = return_package_details + job_resource_update_parameter['identity'] = {} + job_resource_update_parameter['identity']['type'] = "None" if type_ is None else type_ + job_resource_update_parameter['identity']['user_assigned_identities'] = user_assigned_identities + return sdk_no_wait(no_wait, + client.begin_update, + resource_group_name=resource_group_name, + job_name=job_name, + if_match=if_match, + job_resource_update_parameter=job_resource_update_parameter) + + +def databox_job_delete(client, + resource_group_name, + job_name, + no_wait=False): + return sdk_no_wait(no_wait, + client.begin_delete, + resource_group_name=resource_group_name, + job_name=job_name) + + +def databox_job_book_shipment_pick_up(client, + resource_group_name, + job_name, + start_time, + end_time, + shipment_location): + shipment_pick_up_request = {} + shipment_pick_up_request['start_time'] = start_time + shipment_pick_up_request['end_time'] = end_time + shipment_pick_up_request['shipment_location'] = shipment_location + return client.book_shipment_pick_up(resource_group_name=resource_group_name, + job_name=job_name, + shipment_pick_up_request=shipment_pick_up_request) + + +def databox_job_cancel(client, + resource_group_name, + job_name, + reason): + cancellation_reason = {} + cancellation_reason['reason'] = reason + return client.cancel(resource_group_name=resource_group_name, + job_name=job_name, + cancellation_reason=cancellation_reason) + + +def databox_job_list_credentials(client, + resource_group_name, + job_name): + return client.list_credentials(resource_group_name=resource_group_name, + job_name=job_name) + + +def databox_job_mark_device_shipped(client, + job_name, + resource_group_name, + delivery_package_details): + mark_devices_shipped_request = {} + mark_devices_shipped_request['delivery_package_details'] = delivery_package_details + return client.mark_devices_shipped(job_name=job_name, + resource_group_name=resource_group_name, + mark_devices_shipped_request=mark_devices_shipped_request) + + +def databox_mitigate(client, + job_name, + resource_group_name, + customer_resolution_code): + mitigate_job_request = {} + mitigate_job_request['customer_resolution_code'] = customer_resolution_code + return client.mitigate(job_name=job_name, + resource_group_name=resource_group_name, + mitigate_job_request=mitigate_job_request) + + +def databox_service_list_available_sku_by_resource_group(client, + resource_group_name, + location, + transfer_type, + country, + available_sku_request_location, + sku_names=None): + available_sku_request = {} + available_sku_request['transfer_type'] = transfer_type + available_sku_request['country'] = country + available_sku_request['location'] = available_sku_request_location + available_sku_request['sku_names'] = sku_names + return client.list_available_skus_by_resource_group(resource_group_name=resource_group_name, + location=location, + available_sku_request=available_sku_request) + + +def databox_service_region_configuration(client, + location, + data_box_schedule_availability_request=None, + disk_schedule_availability_request=None, + heavy_schedule_availability_request=None, + datacenter_address_request=None, + sku_name=None): + all_schedule_availability_request = [] + if data_box_schedule_availability_request is not None: + 
all_schedule_availability_request.append(data_box_schedule_availability_request) + if disk_schedule_availability_request is not None: + all_schedule_availability_request.append(disk_schedule_availability_request) + if heavy_schedule_availability_request is not None: + all_schedule_availability_request.append(heavy_schedule_availability_request) + if len(all_schedule_availability_request) > 1: + raise CLIError('at most one of data_box_schedule_availability_request, disk_schedule_availability_request, ' + 'heavy_schedule_availability_request is needed for schedule_availability_request!') + schedule_availability_request = all_schedule_availability_request[0] if len(all_schedule_availability_request) == \ + 1 else None + region_configuration_request = {} + region_configuration_request['schedule_availability_request'] = schedule_availability_request + region_configuration_request['datacenter_address_request'] = datacenter_address_request + region_configuration_request['transport_availability_request'] = {} + region_configuration_request['transport_availability_request']['sku_name'] = sku_name + return client.region_configuration(location=location, + region_configuration_request=region_configuration_request) + + +def databox_service_region_configuration_by_resource_group(client, + resource_group_name, + location, + data_box_schedule_availability_request=None, + disk_schedule_availability_request=None, + heavy_schedule_availability_request=None, + datacenter_address_request=None, + sku_name=None): + all_schedule_availability_request = [] + if data_box_schedule_availability_request is not None: + all_schedule_availability_request.append(data_box_schedule_availability_request) + if disk_schedule_availability_request is not None: + all_schedule_availability_request.append(disk_schedule_availability_request) + if heavy_schedule_availability_request is not None: + all_schedule_availability_request.append(heavy_schedule_availability_request) + if len(all_schedule_availability_request) > 1: + raise CLIError('at most one of data_box_schedule_availability_request, disk_schedule_availability_request, ' + 'heavy_schedule_availability_request is needed for schedule_availability_request!') + schedule_availability_request = all_schedule_availability_request[0] if len(all_schedule_availability_request) == \ + 1 else None + region_configuration_request = {} + region_configuration_request['schedule_availability_request'] = schedule_availability_request + region_configuration_request['datacenter_address_request'] = datacenter_address_request + region_configuration_request['transport_availability_request'] = {} + region_configuration_request['transport_availability_request']['sku_name'] = sku_name + return client.region_configuration_by_resource_group(resource_group_name=resource_group_name, + location=location, + region_configuration_request=region_configuration_request) + + +def databox_service_validate_address(client, + location, + validation_type, + shipping_address, + device_type, + preferred_shipment_type=None): + validate_address = {} + validate_address['shipping_address'] = shipping_address + validate_address['device_type'] = device_type + validate_address['transport_preferences'] = {} + validate_address['transport_preferences']['preferred_shipment_type'] = preferred_shipment_type + return client.validate_address(location=location, + validate_address=validate_address) + + +def databox_service_validate_input(client, + location, + create_job_validations=None): + all_validation_request = [] + if 
create_job_validations is not None: + all_validation_request.append(create_job_validations) + if len(all_validation_request) > 1: + raise CLIError('at most one of create_job_validations is needed for validation_request!') + if len(all_validation_request) != 1: + raise CLIError('validation_request is required. but none of create_job_validations is provided!') + validation_request = all_validation_request[0] if len(all_validation_request) == 1 else None + return client.validate_inputs(location=location, + validation_request=validation_request) + + +def databox_service_validate_input_by_resource_group(client, + resource_group_name, + location, + create_job_validations=None): + all_validation_request = [] + if create_job_validations is not None: + all_validation_request.append(create_job_validations) + if len(all_validation_request) > 1: + raise CLIError('at most one of create_job_validations is needed for validation_request!') + if len(all_validation_request) != 1: + raise CLIError('validation_request is required. but none of create_job_validations is provided!') + validation_request = all_validation_request[0] if len(all_validation_request) == 1 else None + return client.validate_inputs_by_resource_group(resource_group_name=resource_group_name, + location=location, + validation_request=validation_request) diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/__init__.py b/src/databox/azext_databox/manual/__init__.py similarity index 73% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/__init__.py rename to src/databox/azext_databox/manual/__init__.py index bb6b75a72db..c9cfdc73e77 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/__init__.py +++ b/src/databox/azext_databox/manual/__init__.py @@ -1,10 +1,12 @@ # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# # Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. # -------------------------------------------------------------------------- -from ._data_box_management_client import DataBoxManagementClient -__all__ = ['DataBoxManagementClient'] +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/src/databox/azext_databox/tests/__init__.py b/src/databox/azext_databox/tests/__init__.py new file mode 100644 index 00000000000..70488e93851 --- /dev/null +++ b/src/databox/azext_databox/tests/__init__.py @@ -0,0 +1,116 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
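The tests/__init__.py added here provides the bookkeeping shared by the generated scenario tests: @try_manual swaps in a same-named function from the manual test package when one exists, records each step's outcome in test_map, and calc_coverage()/raise_if() write a coverage report and re-raise any cached failures. A hedged sketch of how a module under tests/latest could consume it (the step and its checks are hypothetical; the real steps appear in example_steps.py below):

from .. import try_manual, raise_if, calc_coverage


@try_manual
def step_job_show_minimal(test, rg, checks=None):
    # Hypothetical step following the shape of the generated steps: run the CLI
    # command with the scenario's kwargs and apply any supplied checks.
    test.cmd('az databox job show --name "{myJob}" --resource-group "{rg}"',
             checks=checks or [])


def call_scenario(test, rg):
    # Failures are cached only when TEST_EXCEPTION_CACHE=true; raise_if() then
    # re-raises the first one with the others appended to its message.
    step_job_show_minimal(test, rg, checks=[])
    calc_coverage(__file__)
    raise_if()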
+# -------------------------------------------------------------------------- +import inspect +import logging +import os +import sys +import traceback +import datetime as dt + +from azure.core.exceptions import AzureError +from azure.cli.testsdk.exceptions import CliTestError, CliExecutionError, JMESPathCheckAssertionError + + +logger = logging.getLogger('azure.cli.testsdk') +logger.addHandler(logging.StreamHandler()) +__path__ = __import__('pkgutil').extend_path(__path__, __name__) +exceptions = [] +test_map = dict() +SUCCESSED = "successed" +FAILED = "failed" + + +def try_manual(func): + def import_manual_function(origin_func): + from importlib import import_module + decorated_path = inspect.getfile(origin_func).lower() + module_path = __path__[0].lower() + if not decorated_path.startswith(module_path): + raise Exception("Decorator can only be used in submodules!") + manual_path = os.path.join( + decorated_path[module_path.rfind(os.path.sep) + 1:]) + manual_file_path, manual_file_name = os.path.split(manual_path) + module_name, _ = os.path.splitext(manual_file_name) + manual_module = "..manual." + \ + ".".join(manual_file_path.split(os.path.sep) + [module_name, ]) + return getattr(import_module(manual_module, package=__name__), origin_func.__name__) + + def get_func_to_call(): + func_to_call = func + try: + func_to_call = import_manual_function(func) + logger.info("Found manual override for %s(...)", func.__name__) + except (ImportError, AttributeError): + pass + return func_to_call + + def wrapper(*args, **kwargs): + func_to_call = get_func_to_call() + logger.info("running %s()...", func.__name__) + try: + test_map[func.__name__] = dict() + test_map[func.__name__]["result"] = SUCCESSED + test_map[func.__name__]["error_message"] = "" + test_map[func.__name__]["error_stack"] = "" + test_map[func.__name__]["error_normalized"] = "" + test_map[func.__name__]["start_dt"] = dt.datetime.utcnow() + ret = func_to_call(*args, **kwargs) + except (AssertionError, AzureError, CliTestError, CliExecutionError, SystemExit, + JMESPathCheckAssertionError) as e: + use_exception_cache = os.getenv("TEST_EXCEPTION_CACHE") + if use_exception_cache is None or use_exception_cache.lower() != "true": + raise + test_map[func.__name__]["end_dt"] = dt.datetime.utcnow() + test_map[func.__name__]["result"] = FAILED + test_map[func.__name__]["error_message"] = str(e).replace("\r\n", " ").replace("\n", " ")[:500] + test_map[func.__name__]["error_stack"] = traceback.format_exc().replace( + "\r\n", " ").replace("\n", " ")[:500] + logger.info("--------------------------------------") + logger.info("step exception: %s", e) + logger.error("--------------------------------------") + logger.error("step exception in %s: %s", func.__name__, e) + logger.info(traceback.format_exc()) + exceptions.append((func.__name__, sys.exc_info())) + else: + test_map[func.__name__]["end_dt"] = dt.datetime.utcnow() + return ret + + if inspect.isclass(func): + return get_func_to_call() + return wrapper + + +def calc_coverage(filename): + filename = filename.split(".")[0] + coverage_name = filename + "_coverage.md" + with open(coverage_name, "w") as f: + f.write("|Scenario|Result|ErrorMessage|ErrorStack|ErrorNormalized|StartDt|EndDt|\n") + total = len(test_map) + covered = 0 + for k, v in test_map.items(): + if not k.startswith("step_"): + total -= 1 + continue + if v["result"] == SUCCESSED: + covered += 1 + f.write("|{step_name}|{result}|{error_message}|{error_stack}|{error_normalized}|{start_dt}|" + "{end_dt}|\n".format(step_name=k, **v)) + 
f.write("Coverage: {}/{}\n".format(covered, total)) + print("Create coverage\n", file=sys.stderr) + + +def raise_if(): + if exceptions: + if len(exceptions) <= 1: + raise exceptions[0][1][1] + message = "{}\nFollowed with exceptions in other steps:\n".format(str(exceptions[0][1][1])) + message += "\n".join(["{}: {}".format(h[0], h[1][1]) for h in exceptions[1:]]) + raise exceptions[0][1][0](message).with_traceback(exceptions[0][1][2]) diff --git a/src/databox/azext_databox/tests/latest/__init__.py b/src/databox/azext_databox/tests/latest/__init__.py new file mode 100644 index 00000000000..c9cfdc73e77 --- /dev/null +++ b/src/databox/azext_databox/tests/latest/__init__.py @@ -0,0 +1,12 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/src/databox/azext_databox/tests/latest/example_steps.py b/src/databox/azext_databox/tests/latest/example_steps.py new file mode 100644 index 00000000000..8b79dbe1804 --- /dev/null +++ b/src/databox/azext_databox/tests/latest/example_steps.py @@ -0,0 +1,452 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + + +from .. 
import try_manual + + +# EXAMPLE: /Jobs/put/JobsCreate +@try_manual +def step_job_create(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=None): + if checks is None: + checks = [] + test.cmd('az databox job create ' + '--name "{myJob}" ' + '--location "westus" ' + '--transfer-type "ImportToAzure" ' + '--details "{{\\"contactDetails\\":{{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@m' + 'icrosoft.com\\"],\\"phone\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"}},\\"dataImportDetails\\":[' + '{{\\"accountDetails\\":{{\\"dataAccountType\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscripti' + 'ons/{subscription_id}/resourcegroups/{rg_5}/providers/Microsoft.Storage/storageAccounts/{sa}\\"}}}}],\\"j' + 'obDetailsType\\":\\"DataBox\\",\\"shippingAddress\\":{{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"S' + 'an Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"' + 'stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit ' + '1\\"}}}}" ' + '--sku name="DataBox" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Jobs/put/JobsCreateDevicePassword +@try_manual +def step_job_create2(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, + rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox job create ' + '--name "{myJob5}" ' + '--location "westus" ' + '--transfer-type "ImportToAzure" ' + '--details "{{\\"contactDetails\\":{{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@m' + 'icrosoft.com\\"],\\"phone\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"}},\\"dataImportDetails\\":[' + '{{\\"accountDetails\\":{{\\"dataAccountType\\":\\"StorageAccount\\",\\"sharePassword\\":\\"\\",\\"storageAccountId\\":\\"/subscriptions/{subscription_id}/resourceGroups/{rg_6}/providers/Microsoft' + '.Storage/storageAccounts/{sa_2}\\"}}}}],\\"devicePassword\\":\\"\\",\\"jobDetailsType\\":' + '\\"DataBox\\",\\"shippingAddress\\":{{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San ' + 'Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"sta' + 'teOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit ' + '1\\"}}}}" ' + '--sku name="DataBox" ' + '--resource-group "{rg_7}"', + checks=checks) + + +# EXAMPLE: /Jobs/put/JobsCreateDoubleEncryption +@try_manual +def step_job_create3(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, + rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox job create ' + '--name "{myJob6}" ' + '--location "westus" ' + '--transfer-type "ImportToAzure" ' + '--details "{{\\"contactDetails\\":{{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@m' + 'icrosoft.com\\"],\\"phone\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"}},\\"dataImportDetails\\":[' + '{{\\"accountDetails\\":{{\\"dataAccountType\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscripti' + 'ons/{subscription_id}/resourcegroups/{rg_5}/providers/Microsoft.Storage/storageAccounts/{sa}\\"}}}}],\\"j' + 'obDetailsType\\":\\"DataBox\\",\\"preferences\\":{{\\"encryptionPreferences\\":{{\\"doubleEncryption\\":' + '\\"Enabled\\"}}}},\\"shippingAddress\\":{{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San ' + 
'Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"sta' + 'teOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit ' + '1\\"}}}}" ' + '--sku name="DataBox" ' + '--resource-group "{rg_8}"', + checks=checks) + + +# EXAMPLE: /Jobs/put/JobsCreateExport +@try_manual +def step_job_create4(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, + rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox job create ' + '--name "{myJob4}" ' + '--location "westus" ' + '--transfer-type "ExportFromAzure" ' + '--details "{{\\"contactDetails\\":{{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@m' + 'icrosoft.com\\"],\\"phone\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"}},\\"dataExportDetails\\":[' + '{{\\"accountDetails\\":{{\\"dataAccountType\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscripti' + 'ons/{subscription_id}/resourceGroups/{rg_9}/providers/Microsoft.Storage/storageAccounts/{sa_3}\\"}},\\"tr' + 'ansferConfiguration\\":{{\\"transferAllDetails\\":{{\\"include\\":{{\\"dataAccountType\\":\\"StorageAccou' + 'nt\\",\\"transferAllBlobs\\":true,\\"transferAllFiles\\":true}}}},\\"transferConfigurationType\\":\\"Tran' + 'sferAll\\"}}}}],\\"jobDetailsType\\":\\"DataBox\\",\\"shippingAddress\\":{{\\"addressType\\":\\"Commercia' + 'l\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"postalCod' + 'e\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND ' + 'ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}}}" ' + '--sku name="DataBox" ' + '--resource-group "{rg_4}"', + checks=checks) + + +# EXAMPLE: /Jobs/put/JobsCreateWithUserAssignedIdentity +@try_manual +def step_job_create5(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, + rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox job create ' + '--name "{myJob7}" ' + '--type "UserAssigned" ' + '--user-assigned-identities "{{\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/akven' + 'kat/providers/Microsoft.ManagedIdentity/userAssignedIdentities/sdkIdentity\\":{{}}}}" ' + '--location "westus" ' + '--transfer-type "ImportToAzure" ' + '--details "{{\\"contactDetails\\":{{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@m' + 'icrosoft.com\\"],\\"phone\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"}},\\"dataImportDetails\\":[' + '{{\\"accountDetails\\":{{\\"dataAccountType\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscripti' + 'ons/{subscription_id}/resourceGroups/{rg_6}/providers/Microsoft.Storage/storageAccounts/{sa_2}\\"}}}}],\\' + '"jobDetailsType\\":\\"DataBox\\",\\"shippingAddress\\":{{\\"addressType\\":\\"Commercial\\",\\"city\\":\\' + '"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",' + '\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit ' + '1\\"}}}}" ' + '--sku name="DataBox" ' + '--resource-group "{rg_10}"', + checks=checks) + + +# EXAMPLE: /Jobs/get/JobsGet +@try_manual +def step_job_show(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=None): + if checks is None: + checks = [] + test.cmd('az databox job show ' + '--expand "details" ' + '--name "{myJob}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: 
/Jobs/get/JobsGetCmk +@try_manual +def step_job_show2(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=None): + if checks is None: + checks = [] + test.cmd('az databox job show ' + '--expand "details" ' + '--name "{myJob2}" ' + '--resource-group "{rg_2}"', + checks=checks) + + +# EXAMPLE: /Jobs/get/JobsGetCopyStuck +@try_manual +def step_job_show3(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=None): + if checks is None: + checks = [] + test.cmd('az databox job show ' + '--expand "details" ' + '--name "{myJob3}" ' + '--resource-group "{rg_3}"', + checks=checks) + + +# EXAMPLE: /Jobs/get/JobsGetExport +@try_manual +def step_job_show4(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=None): + if checks is None: + checks = [] + test.cmd('az databox job show ' + '--expand "details" ' + '--name "{myJob4}" ' + '--resource-group "{rg_4}"', + checks=checks) + + +# EXAMPLE: /Jobs/get/JobsGetWaitingForAction +@try_manual +def step_job_show5(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=None): + return step_job_show3(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, + rg_15, checks) + + +# EXAMPLE: /Jobs/get/JobsList +@try_manual +def step_job_list(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=None): + if checks is None: + checks = [] + test.cmd('az databox job list ' + '-g ""', + checks=checks) + + +# EXAMPLE: /Jobs/get/JobsListByResourceGroup +@try_manual +def step_job_list2(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=None): + if checks is None: + checks = [] + test.cmd('az databox job list ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Jobs/patch/JobsPatch +@try_manual +def step_job_update(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=None): + if checks is None: + checks = [] + test.cmd('az databox job update ' + '--name "{myJob}" ' + '--contact-details contact-name="Update Job" email-list="testing@microsoft.com" phone="1234567890" ' + 'phone-extension="1234" ' + '--shipping-address address-type="Commercial" city="San Francisco" company-name="Microsoft" country="US" ' + 'postal-code="94107" state-or-province="CA" street-address1="16 TOWNSEND ST" street-address2="Unit 1" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Jobs/patch/JobsPatchCmk +@try_manual +def step_job_update2(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, + rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox job update ' + '--name "{myJob2}" ' + '--key-encryption-key "{{\\"kekType\\":\\"CustomerManaged\\",\\"kekUrl\\":\\"https://sdkkeyvault.vault.azu' + 're.net/keys/SSDKEY/\\",\\"kekVaultResourceID\\":\\"/subscriptions/{subscription_id}/resourceGroups/{rg_9}' + '/providers/Microsoft.KeyVault/vaults/SDKKeyVault\\"}}" ' + '--resource-group "{rg_2}"', + checks=checks) + + +# EXAMPLE: /Jobs/patch/JobsPatchSystemAssignedToUserAssigned +@try_manual +def step_job_update3(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, + rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox job update ' + '--name 
"{myJob8}" ' + '--type "SystemAssigned,UserAssigned" ' + '--user-assigned-identities "{{\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/akven' + 'kat/providers/Microsoft.ManagedIdentity/userAssignedIdentities/sdkIdentity\\":{{}}}}" ' + '--key-encryption-key "{{\\"identityProperties\\":{{\\"type\\":\\"UserAssigned\\",\\"userAssigned\\":{{\\"' + 'resourceId\\":\\"/subscriptions/{subscription_id}/resourceGroups/{rg_9}/providers/Microsoft.ManagedIdenti' + 'ty/userAssignedIdentities/sdkIdentity\\"}}}},\\"kekType\\":\\"CustomerManaged\\",\\"kekUrl\\":\\"https://' + 'sdkkeyvault.vault.azure.net/keys/SSDKEY/\\",\\"kekVaultResourceID\\":\\"/subscriptions/{subscription_id}/' + 'resourceGroups/{rg_9}/providers/Microsoft.KeyVault/vaults/SDKKeyVault\\"}}" ' + '--resource-group "{rg_11}"', + checks=checks) + + +# EXAMPLE: /Jobs/post/BookShipmentPickupPost +@try_manual +def step_job_book_shipment_pick_up(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, + rg_13, rg_14, rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox job book-shipment-pick-up ' + '--name "{myJob9}" ' + '--resource-group "{rg_12}" ' + '--end-time "2019-09-22T18:30:00Z" ' + '--shipment-location "Front desk" ' + '--start-time "2019-09-20T18:30:00Z"', + checks=checks) + + +# EXAMPLE: /Jobs/post/JobsCancelPost +@try_manual +def step_job_cancel(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=None): + if checks is None: + checks = [] + test.cmd('az databox job cancel ' + '--reason "CancelTest" ' + '--name "{myJob}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Jobs/post/JobsListCredentials +@try_manual +def step_job_list_credentials(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, + rg_14, rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox job list-credentials ' + '--name "{myJob9}" ' + '--resource-group "{rg_12}"', + checks=checks) + + +# EXAMPLE: /Jobs/post/MarkDevicesShipped +@try_manual +def step_job_mark_device_shipped(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, + rg_14, rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox job mark-device-shipped ' + '--name "{myJob10}" ' + '--delivery-package-details carrier-name="DHL" tracking-id="123456" ' + '--resource-group "{rg_13}"', + checks=checks) + + +# EXAMPLE: /databox/post/Mitigate +@try_manual +def step_mitigate(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=None): + if checks is None: + checks = [] + test.cmd('az databox mitigate ' + '--job-name "{myJob10}" ' + '--customer-resolution-code "MoveToCleanUpDevice" ' + '--resource-group "{rg_13}"', + checks=checks) + + +# EXAMPLE: /Jobs/delete/JobsDelete +@try_manual +def step_job_delete(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=None): + if checks is None: + checks = [] + test.cmd('az databox job delete -y ' + '--name "{myJob}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Service/post/AvailableSkusPost +@try_manual +def step_service_list_available_sku_by_resource_group(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, + rg_11, rg_12, rg_13, rg_14, rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox service list-available-sku-by-resource-group ' + '--country "US" ' + 
'--available-sku-request-location "westus" ' + '--transfer-type "ImportToAzure" ' + '--location "westus" ' + '--resource-group "{rg_12}"', + checks=checks) + + +# EXAMPLE: /Service/post/RegionConfiguration +@try_manual +def step_service_region_configuration(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, + rg_13, rg_14, rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox service region-configuration ' + '--location "westus" ' + '--schedule-availability-request "{{\\"skuName\\":\\"DataBox\\",\\"storageLocation\\":\\"westus\\"}}"', + checks=checks) + + +# EXAMPLE: /Service/post/RegionConfigurationByResourceGroup +@try_manual +def step_service_region_configuration2(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, + rg_13, rg_14, rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox service region-configuration-by-resource-group ' + '--location "westus" ' + '--schedule-availability-request "{{\\"skuName\\":\\"DataBox\\",\\"storageLocation\\":\\"westus\\"}}" ' + '--resource-group "{rg_14}"', + checks=checks) + + +# EXAMPLE: /Service/post/ValidateAddressPost +@try_manual +def step_service_validate_address(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, + rg_14, rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox service validate-address ' + '--location "westus" ' + '--device-type "DataBox" ' + '--shipping-address address-type="Commercial" city="San Francisco" company-name="Microsoft" country="US" ' + 'postal-code="94107" state-or-province="CA" street-address1="16 TOWNSEND ST" street-address2="Unit 1" ' + '--validation-type "ValidateAddress"', + checks=checks) + + +# EXAMPLE: /Service/post/ValidateInputs +@try_manual +def step_service_validate_input(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, + rg_14, rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox service validate-input ' + '--location "westus" ' + '--validation-request "{{\\"individualRequestDetails\\":[{{\\"dataImportDetails\\":[{{\\"accountDetails\\"' + ':{{\\"dataAccountType\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/{subscription_id}/' + 'resourcegroups/{rg_5}/providers/Microsoft.Storage/storageAccounts/{sa}\\"}}}}],\\"deviceType\\":\\"DataBo' + 'x\\",\\"transferType\\":\\"ImportToAzure\\",\\"validationType\\":\\"ValidateDataTransferDetails\\"}},{{\\' + '"deviceType\\":\\"DataBox\\",\\"shippingAddress\\":{{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San' + ' Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"st' + 'ateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit ' + '1\\"}},\\"transportPreferences\\":{{\\"preferredShipmentType\\":\\"MicrosoftManaged\\"}},\\"validationTyp' + 'e\\":\\"ValidateAddress\\"}},{{\\"validationType\\":\\"ValidateSubscriptionIsAllowedToCreateJob\\"}},{{\\' + '"country\\":\\"US\\",\\"deviceType\\":\\"DataBox\\",\\"location\\":\\"westus\\",\\"transferType\\":\\"Imp' + 'ortToAzure\\",\\"validationType\\":\\"ValidateSkuAvailability\\"}},{{\\"deviceType\\":\\"DataBox\\",\\"va' + 'lidationType\\":\\"ValidateCreateOrderLimit\\"}},{{\\"deviceType\\":\\"DataBox\\",\\"preference\\":{{\\"t' + 'ransportPreferences\\":{{\\"preferredShipmentType\\":\\"MicrosoftManaged\\"}}}},\\"validationType\\":\\"V' + 
'alidatePreferences\\"}}],\\"validationCategory\\":\\"JobCreationValidation\\"}}"', + checks=checks) + + +# EXAMPLE: /Service/post/ValidateInputsByResourceGroup +@try_manual +def step_service_validate_input_by_resource_group(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, + rg_11, rg_12, rg_13, rg_14, rg_15, checks=None): + if checks is None: + checks = [] + test.cmd('az databox service validate-input-by-resource-group ' + '--location "westus" ' + '--resource-group "{rg_15}" ' + '--validation-request "{{\\"individualRequestDetails\\":[{{\\"dataImportDetails\\":[{{\\"accountDetails\\"' + ':{{\\"dataAccountType\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/{subscription_id}/' + 'resourcegroups/{rg_5}/providers/Microsoft.Storage/storageAccounts/{sa}\\"}}}}],\\"deviceType\\":\\"DataBo' + 'x\\",\\"transferType\\":\\"ImportToAzure\\",\\"validationType\\":\\"ValidateDataTransferDetails\\"}},{{\\' + '"deviceType\\":\\"DataBox\\",\\"shippingAddress\\":{{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San' + ' Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"st' + 'ateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit ' + '1\\"}},\\"transportPreferences\\":{{\\"preferredShipmentType\\":\\"MicrosoftManaged\\"}},\\"validationTyp' + 'e\\":\\"ValidateAddress\\"}},{{\\"validationType\\":\\"ValidateSubscriptionIsAllowedToCreateJob\\"}},{{\\' + '"country\\":\\"US\\",\\"deviceType\\":\\"DataBox\\",\\"location\\":\\"westus\\",\\"transferType\\":\\"Imp' + 'ortToAzure\\",\\"validationType\\":\\"ValidateSkuAvailability\\"}},{{\\"deviceType\\":\\"DataBox\\",\\"va' + 'lidationType\\":\\"ValidateCreateOrderLimit\\"}},{{\\"deviceType\\":\\"DataBox\\",\\"preference\\":{{\\"t' + 'ransportPreferences\\":{{\\"preferredShipmentType\\":\\"MicrosoftManaged\\"}}}},\\"validationType\\":\\"V' + 'alidatePreferences\\"}}],\\"validationCategory\\":\\"JobCreationValidation\\"}}"', + checks=checks) + diff --git a/src/databox/azext_databox/tests/latest/test_databox_scenario.py b/src/databox/azext_databox/tests/latest/test_databox_scenario.py index a19ecfbd149..8d9c416f415 100644 --- a/src/databox/azext_databox/tests/latest/test_databox_scenario.py +++ b/src/databox/azext_databox/tests/latest/test_databox_scenario.py @@ -1,143 +1,165 @@ -# -------------------------------------------------------------------------------------------- +# -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=line-too-long import os +from azure.cli.testsdk import ScenarioTest +from azure.cli.testsdk import ResourceGroupPreparer +from azure.cli.testsdk import StorageAccountPreparer +from .example_steps import step_job_create +from .example_steps import step_job_show +from .example_steps import step_job_list2 +from .example_steps import step_job_list +from .example_steps import step_job_book_shipment_pick_up +from .example_steps import step_job_list_credentials +from .example_steps import step_job_cancel +from .example_steps import step_job_update +from .example_steps import step_service_validate_address +from .example_steps import step_service_list_available_sku_by_resource_group +from .example_steps import step_job_delete +from .example_steps import step_job_show5 +from .example_steps import step_job_mark_device_shipped +from .. import ( + try_manual, + raise_if, + calc_coverage +) -from azure.cli.testsdk import (ScenarioTest, ResourceGroupPreparer, StorageAccountPreparer, JMESPathCheck) TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), '..')) -class DataBoxScenarioTest(ScenarioTest): +# Env setup_scenario +@try_manual +def setup_scenario(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, + rg_15): + pass + + +# Env cleanup_scenario +@try_manual +def cleanup_scenario(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, + rg_15): + pass + + +# Testcase: Scenario +@try_manual +def call_scenario(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15): + setup_scenario(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15) + step_job_create(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=[ + test.check("name", "{myJob}", case_sensitive=False), + test.check("location", "westus", case_sensitive=False), + test.check("transferType", "ImportToAzure", case_sensitive=False), + test.check("sku.name", "DataBox", case_sensitive=False), + ]) + # STEP NOT FOUND: JobsGet6 + # STEP NOT FOUND: JobsGet5 + # STEP NOT FOUND: JobsGet4 + # STEP NOT FOUND: JobsGet3 + # STEP NOT FOUND: JobsGet2 + # STEP NOT FOUND: JobsGet1 + step_job_show(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=[ + test.check("name", "{myJob}", case_sensitive=False), + test.check("location", "westus", case_sensitive=False), + test.check("transferType", "ImportToAzure", case_sensitive=False), + test.check("sku.name", "DataBox", case_sensitive=False), + ]) + step_job_list2(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=[ + test.check('length(@)', 1), + ]) + step_job_list(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=[ + test.check('length(@)', 9), + ]) + # STEP NOT FOUND: OperationsGet + # STEP NOT FOUND: ServiceValidateInputsByResourceGroup + # STEP NOT FOUND: AvailableSkusByResourceGroup + step_job_book_shipment_pick_up(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, + rg_13, rg_14, rg_15, checks=[]) + step_job_list_credentials(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, + rg_14, rg_15, checks=[]) + step_job_cancel(test, rg_5, rg_6, rg_9, rg, rg_2, 
rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=[]) + step_job_update(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=[ + test.check("name", "{myJob}", case_sensitive=False), + ]) + # STEP NOT FOUND: ServiceRegionConfiguration + step_service_validate_address(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, + rg_14, rg_15, checks=[]) + # STEP NOT FOUND: ServiceValidateInputs + step_service_list_available_sku_by_resource_group(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, + rg_11, rg_12, rg_13, rg_14, rg_15, checks=[]) + step_job_delete(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=[]) + # STEP NOT FOUND: JobMitigate + step_job_show5(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15, + checks=[]) + step_job_mark_device_shipped(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, + rg_14, rg_15, checks=[]) + cleanup_scenario(test, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, + rg_15) + + +# Test class for Scenario +@try_manual +class DataboxScenarioTest(ScenarioTest): + + def __init__(self, *args, **kwargs): + super(DataboxScenarioTest, self).__init__(*args, **kwargs) + self.kwargs.update({ + 'subscription_id': self.get_subscription_id() + }) - @ResourceGroupPreparer(name_prefix='cli_test_databox') - @StorageAccountPreparer(parameter_name='storage_account_1') - @StorageAccountPreparer(parameter_name='storage_account_2') - def test_databox(self, storage_account_1, storage_account_2): - job_name = self.create_random_name('job', 24) - job_name_2 = self.create_random_name('job', 24) self.kwargs.update({ - 'job_name': job_name, - 'job_name_2': job_name_2, - 'storage_account_1': storage_account_1, - 'storage_account_2': storage_account_2 + 'myJob': 'SdkJob952', + 'myJob2': 'SdkJob1735', + 'myJob3': 'TJx-637505258985313014', + 'myJob4': 'SdkJob6429', + 'myJob5': 'SdkJob9640', + 'myJob6': 'SdkJob6599', + 'myJob7': 'SdkJob5337', + 'myJob8': 'SdkJob2965', + 'myJob9': 'TJ-636646322037905056', + 'myJob10': 'SdkJob8367', }) - # Create a databox job with sku 'DataBox'. 
- self.cmd('databox job create ' - '--resource-group {rg} ' - '--name {job_name} ' - '--location westus ' - '--sku DataBox ' - '--contact-name "Public SDK Test" ' - '--phone 14258828080 ' - '--email-list testing@microsoft.com ' - '--street-address1 "1 MICROSOFT WAY" ' - '--city Redmond ' - '--state-or-province WA ' - '--country US ' - '--postal-code 98052 ' - '--company-name Microsoft ' - '--storage-account {storage_account_1} {storage_account_2} ' - '--staging-storage-account {storage_account_1} ' - '--resource-group-for-managed-disk rg-for-managed-disk', - checks=[JMESPathCheck('status', 'DeviceOrdered')]) - - self.cmd('databox job update ' - '--resource-group {rg} ' - '--name {job_name} ' - '--contact-name "Public SDK Test 1" ' - '--email-list testing1@microsoft.com', - checks=[]) - - self.cmd('databox job show ' - '--resource-group {rg} ' - '--name {job_name}', - checks=[ - JMESPathCheck('name', job_name), - JMESPathCheck('isCancellable', True), - JMESPathCheck('isDeletable', False), - JMESPathCheck('details.contactDetails.contactName', 'Public SDK Test 1'), - JMESPathCheck('details.contactDetails.emailList[0]', 'testing1@microsoft.com')]) - - self.cmd('databox job list ' - '--resource-group {rg}', - checks=[JMESPathCheck('length(@)', 1)]) - - self.cmd('databox job cancel ' - '--resource-group {rg} ' - '--name {job_name} ' - '--reason "CancelTest" ' - '-y', - checks=[]) - - self.cmd('databox job show ' - '--resource-group {rg} ' - '--name {job_name}', - checks=[ - JMESPathCheck('name', job_name), - JMESPathCheck('isCancellable', False), - JMESPathCheck('isDeletable', True)]) - - self.cmd('databox job delete ' - '--resource-group {rg} ' - '--name {job_name} ' - '-y', - checks=[]) - - self.cmd('databox job show ' - '--resource-group {rg} ' - '--name {job_name}', - expect_failure=True) - - # Create another databox job with sku 'DataBoxDisk'. - self.cmd('databox job create ' - '--resource-group {rg} ' - '--name {job_name_2} ' - '--location westus ' - '--sku DataBoxDisk ' - '--expected-data-size 1 ' - '--contact-name "Public SDK Test" ' - '--phone 14258828080 ' - '--email-list testing@microsoft.com ' - '--street-address1 "1 MICROSOFT WAY" ' - '--city Redmond ' - '--state-or-province WA ' - '--country US ' - '--postal-code 98052 ' - '--company-name Microsoft ' - '--storage-account {storage_account_1}', - checks=[JMESPathCheck('status', 'DeviceOrdered')]) - - self.cmd('databox job cancel ' - '--resource-group {rg} ' - '--name {job_name_2} ' - '--reason "CancelTest" ' - '-y', - checks=[]) - - self.cmd('databox job delete ' - '--resource-group {rg} ' - '--name {job_name_2} ' - '-y', - checks=[]) - - self.cmd('databox job show ' - '--resource-group {rg} ' - '--name {job_name_2}', - expect_failure=True) - - # DataBox service will create a lock 'DATABOX_SERVICE' on the storage account under the resource group when creating a job. In order to clean up the resource group, we need delete the lock first. 
- self.cmd('lock delete ' - '--name DATABOX_SERVICE ' - '-g {rg} ' - '--resource-name {storage_account_1} ' - '--resource-type Microsoft.Storage/storageAccounts') - - self.cmd('lock delete ' - '--name DATABOX_SERVICE ' - '-g {rg} ' - '--resource-name {storage_account_2} ' - '--resource-type Microsoft.Storage/storageAccounts') + + @ResourceGroupPreparer(name_prefix='clitestdatabox_databoxbvt'[:7], key='rg_5', parameter_name='rg_5') + @ResourceGroupPreparer(name_prefix='clitestdatabox_databoxbvt1'[:7], key='rg_6', parameter_name='rg_6') + @ResourceGroupPreparer(name_prefix='clitestdatabox_akvenkat'[:7], key='rg_9', parameter_name='rg_9') + @ResourceGroupPreparer(name_prefix='clitestdatabox_SdkRg5154'[:7], key='rg', parameter_name='rg') + @ResourceGroupPreparer(name_prefix='clitestdatabox_SdkRg7937'[:7], key='rg_2', parameter_name='rg_2') + @ResourceGroupPreparer(name_prefix='clitestdatabox_dmstestresource'[:7], key='rg_3', parameter_name='rg_3') + @ResourceGroupPreparer(name_prefix='clitestdatabox_SdkRg8091'[:7], key='rg_4', parameter_name='rg_4') + @ResourceGroupPreparer(name_prefix='clitestdatabox_SdkRg7478'[:7], key='rg_7', parameter_name='rg_7') + @ResourceGroupPreparer(name_prefix='clitestdatabox_SdkRg608'[:7], key='rg_8', parameter_name='rg_8') + @ResourceGroupPreparer(name_prefix='clitestdatabox_SdkRg7552'[:7], key='rg_10', parameter_name='rg_10') + @ResourceGroupPreparer(name_prefix='clitestdatabox_SdkRg9765'[:7], key='rg_11', parameter_name='rg_11') + @ResourceGroupPreparer(name_prefix='clitestdatabox_bvttoolrg6'[:7], key='rg_12', parameter_name='rg_12') + @ResourceGroupPreparer(name_prefix='clitestdatabox_SdkRg9836'[:7], key='rg_13', parameter_name='rg_13') + @ResourceGroupPreparer(name_prefix='clitestdatabox_SdkRg4981'[:7], key='rg_14', parameter_name='rg_14') + @ResourceGroupPreparer(name_prefix='clitestdatabox_SdkRg6861'[:7], key='rg_15', parameter_name='rg_15') + @StorageAccountPreparer(name_prefix='clitestdatabox_databoxbvttestaccount'[:7], key='sa', + resource_group_parameter_name='rg_5') + @StorageAccountPreparer(name_prefix='clitestdatabox_databoxbvttestaccount2'[:7], key='sa_2', + resource_group_parameter_name='rg_6') + @StorageAccountPreparer(name_prefix='clitestdatabox_aaaaaa2'[:7], key='sa_3', + resource_group_parameter_name='rg_9') + def test_databox_Scenario(self, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, + rg_14, rg_15): + call_scenario(self, rg_5, rg_6, rg_9, rg, rg_2, rg_3, rg_4, rg_7, rg_8, rg_10, rg_11, rg_12, rg_13, rg_14, rg_15) + calc_coverage(__file__) + raise_if() + diff --git a/src/databox/azext_databox/vendored_sdks/__init__.py b/src/databox/azext_databox/vendored_sdks/__init__.py index 7183870ee56..c9cfdc73e77 100644 --- a/src/databox/azext_databox/vendored_sdks/__init__.py +++ b/src/databox/azext_databox/vendored_sdks/__init__.py @@ -1,6 +1,12 @@ -# -------------------------------------------------------------------------------------------- +# coding=utf-8 +# -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- -__path__ = __import__('pkgutil').extend_path(__path__, __name__) \ No newline at end of file +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/src/databox/azext_databox/vendored_sdks/databox/_configuration.py b/src/databox/azext_databox/vendored_sdks/databox/_configuration.py index 5b8f07cf262..229fee90820 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/_configuration.py +++ b/src/databox/azext_databox/vendored_sdks/databox/_configuration.py @@ -1,21 +1,24 @@ # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# +# Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any + +from typing import TYPE_CHECKING from azure.core.configuration import Configuration from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMHttpLoggingPolicy -from ._version import VERSION +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any + + from azure.core.credentials import TokenCredential +VERSION = "unknown" class DataBoxManagementClientConfiguration(Configuration): """Configuration for DataBoxManagementClient. @@ -44,8 +47,9 @@ def __init__( self.credential = credential self.subscription_id = subscription_id + self.api_version = "2021-08-01-preview" self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) - kwargs.setdefault('sdk_moniker', 'azure-mgmt-databox/{}'.format(VERSION)) + kwargs.setdefault('sdk_moniker', 'databoxmanagementclient/{}'.format(VERSION)) self._configure(**kwargs) def _configure( diff --git a/src/databox/azext_databox/vendored_sdks/databox/_data_box_management_client.py b/src/databox/azext_databox/vendored_sdks/databox/_data_box_management_client.py index aa0fa23980a..7821ea5ad3b 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/_data_box_management_client.py +++ b/src/databox/azext_databox/vendored_sdks/databox/_data_box_management_client.py @@ -1,175 +1,80 @@ # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# +# Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from azure.mgmt.core import ARMPipelineClient -from msrest import Serializer, Deserializer - -from azure.profiles import KnownProfiles, ProfileDefinition -from azure.profiles.multiapiclient import MultiApiClientMixin -from ._configuration import DataBoxManagementClientConfiguration +from typing import TYPE_CHECKING -class _SDKClient(object): - def __init__(self, *args, **kwargs): - """This is a fake class to support current implemetation of MultiApiClientMixin." - Will be removed in final version of multiapi azure-core based client - """ - pass +from azure.mgmt.core import ARMPipelineClient +from msrest import Deserializer, Serializer -class DataBoxManagementClient(MultiApiClientMixin, _SDKClient): - """The DataBox Client. +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Optional - This ready contains multiple API versions, to help you deal with all of the Azure clouds - (Azure Stack, Azure Government, Azure China, etc.). - By default, it uses the latest API version available on public Azure. - For production, you should stick to a particular api-version and/or profile. - The profile sets a mapping between an operation group and its API version. - The api-version parameter sets the default API version if the operation - group is not described in the profile. + from azure.core.credentials import TokenCredential +from ._configuration import DataBoxManagementClientConfiguration +from .operations import Operations +from .operations import JobsOperations +from .operations import DataBoxManagementClientOperationsMixin +from .operations import ServiceOperations +from . import models + + +class DataBoxManagementClient(DataBoxManagementClientOperationsMixin): + """DataBoxManagementClient. + + :ivar operations: Operations operations + :vartype operations: data_box_management_client.operations.Operations + :ivar jobs: JobsOperations operations + :vartype jobs: data_box_management_client.operations.JobsOperations + :ivar service: ServiceOperations operations + :vartype service: data_box_management_client.operations.ServiceOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: The Subscription Id. :type subscription_id: str - :param str api_version: API version to use if no profile is provided, or if - missing in profile. :param str base_url: Service URL - :param profile: A profile definition, from KnownProfiles to dict. - :type profile: azure.profiles.KnownProfiles :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. """ - DEFAULT_API_VERSION = '2019-09-01' - _PROFILE_TAG = "azure.mgmt.databox.DataBoxManagementClient" - LATEST_PROFILE = ProfileDefinition({ - _PROFILE_TAG: { - None: DEFAULT_API_VERSION, - }}, - _PROFILE_TAG + " latest" - ) - def __init__( self, credential, # type: "TokenCredential" subscription_id, # type: str - api_version=None, - base_url=None, - profile=KnownProfiles.default, + base_url=None, # type: Optional[str] **kwargs # type: Any ): + # type: (...) 
-> None if not base_url: base_url = 'https://management.azure.com' self._config = DataBoxManagementClientConfiguration(credential, subscription_id, **kwargs) self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) - super(DataBoxManagementClient, self).__init__( - api_version=api_version, - profile=profile - ) - - @classmethod - def _models_dict(cls, api_version): - return {k: v for k, v in cls.models(api_version).__dict__.items() if isinstance(v, type)} - - @classmethod - def models(cls, api_version=DEFAULT_API_VERSION): - """Module depends on the API version: - * 2018-01-01: :mod:`v2018_01_01.models` - * 2019-09-01: :mod:`v2019_09_01.models` - * 2020-04-01: :mod:`v2020_04_01.models` - * 2020-11-01: :mod:`v2020_11_01.models` - """ - if api_version == '2018-01-01': - from .v2018_01_01 import models - return models - elif api_version == '2019-09-01': - from .v2019_09_01 import models - return models - elif api_version == '2020-04-01': - from .v2020_04_01 import models - return models - elif api_version == '2020-11-01': - from .v2020_11_01 import models - return models - raise ValueError("API version {} is not available".format(api_version)) + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) - @property - def jobs(self): - """Instance depends on the API version: - - * 2018-01-01: :class:`JobsOperations` - * 2019-09-01: :class:`JobsOperations` - * 2020-04-01: :class:`JobsOperations` - * 2020-11-01: :class:`JobsOperations` - """ - api_version = self._get_api_version('jobs') - if api_version == '2018-01-01': - from .v2018_01_01.operations import JobsOperations as OperationClass - elif api_version == '2019-09-01': - from .v2019_09_01.operations import JobsOperations as OperationClass - elif api_version == '2020-04-01': - from .v2020_04_01.operations import JobsOperations as OperationClass - elif api_version == '2020-11-01': - from .v2020_11_01.operations import JobsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'jobs'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def operations(self): - """Instance depends on the API version: - - * 2018-01-01: :class:`Operations` - * 2019-09-01: :class:`Operations` - * 2020-04-01: :class:`Operations` - * 2020-11-01: :class:`Operations` - """ - api_version = self._get_api_version('operations') - if api_version == '2018-01-01': - from .v2018_01_01.operations import Operations as OperationClass - elif api_version == '2019-09-01': - from .v2019_09_01.operations import Operations as OperationClass - elif api_version == '2020-04-01': - from .v2020_04_01.operations import Operations as OperationClass - elif api_version == '2020-11-01': - from .v2020_11_01.operations import Operations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'operations'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def service(self): - """Instance depends on the API version: - - * 2018-01-01: :class:`ServiceOperations` - * 2019-09-01: :class:`ServiceOperations` - * 2020-04-01: :class:`ServiceOperations` - * 2020-11-01: :class:`ServiceOperations` - """ - api_version = 
self._get_api_version('service') - if api_version == '2018-01-01': - from .v2018_01_01.operations import ServiceOperations as OperationClass - elif api_version == '2019-09-01': - from .v2019_09_01.operations import ServiceOperations as OperationClass - elif api_version == '2020-04-01': - from .v2020_04_01.operations import ServiceOperations as OperationClass - elif api_version == '2020-11-01': - from .v2020_11_01.operations import ServiceOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'service'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) + self.operations = Operations( + self._client, self._config, self._serialize, self._deserialize) + self.jobs = JobsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.service = ServiceOperations( + self._client, self._config, self._serialize, self._deserialize) def close(self): + # type: () -> None self._client.close() + def __enter__(self): + # type: () -> DataBoxManagementClient self._client.__enter__() return self + def __exit__(self, *exc_details): + # type: (Any) -> None self._client.__exit__(*exc_details) diff --git a/src/databox/azext_databox/vendored_sdks/databox/aio/_configuration.py b/src/databox/azext_databox/vendored_sdks/databox/aio/_configuration.py index 71fd4699f32..9019a25fb4a 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/aio/_configuration.py +++ b/src/databox/azext_databox/vendored_sdks/databox/aio/_configuration.py @@ -1,21 +1,22 @@ # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# +# Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any + +from typing import Any, TYPE_CHECKING from azure.core.configuration import Configuration from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMHttpLoggingPolicy -from .._version import VERSION +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential +VERSION = "unknown" class DataBoxManagementClientConfiguration(Configuration): """Configuration for DataBoxManagementClient. 
@@ -31,9 +32,9 @@ class DataBoxManagementClientConfiguration(Configuration): def __init__( self, - credential, # type: "AsyncTokenCredential" - subscription_id, # type: str - **kwargs # type: Any + credential: "AsyncTokenCredential", + subscription_id: str, + **kwargs: Any ) -> None: if credential is None: raise ValueError("Parameter 'credential' must not be None.") @@ -43,8 +44,9 @@ def __init__( self.credential = credential self.subscription_id = subscription_id + self.api_version = "2021-08-01-preview" self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) - kwargs.setdefault('sdk_moniker', 'azure-mgmt-databox/{}'.format(VERSION)) + kwargs.setdefault('sdk_moniker', 'databoxmanagementclient/{}'.format(VERSION)) self._configure(**kwargs) def _configure( diff --git a/src/databox/azext_databox/vendored_sdks/databox/aio/_data_box_management_client.py b/src/databox/azext_databox/vendored_sdks/databox/aio/_data_box_management_client.py index 3d3baff1574..a8aea887805 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/aio/_data_box_management_client.py +++ b/src/databox/azext_databox/vendored_sdks/databox/aio/_data_box_management_client.py @@ -1,175 +1,74 @@ # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# +# Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from azure.mgmt.core import AsyncARMPipelineClient -from msrest import Serializer, Deserializer - -from azure.profiles import KnownProfiles, ProfileDefinition -from azure.profiles.multiapiclient import MultiApiClientMixin -from ._configuration import DataBoxManagementClientConfiguration - -class _SDKClient(object): - def __init__(self, *args, **kwargs): - """This is a fake class to support current implemetation of MultiApiClientMixin." - Will be removed in final version of multiapi azure-core based client - """ - pass +from typing import Any, Optional, TYPE_CHECKING -class DataBoxManagementClient(MultiApiClientMixin, _SDKClient): - """The DataBox Client. +from azure.mgmt.core import AsyncARMPipelineClient +from msrest import Deserializer, Serializer - This ready contains multiple API versions, to help you deal with all of the Azure clouds - (Azure Stack, Azure Government, Azure China, etc.). - By default, it uses the latest API version available on public Azure. - For production, you should stick to a particular api-version and/or profile. - The profile sets a mapping between an operation group and its API version. - The api-version parameter sets the default API version if the operation - group is not described in the profile. +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential +from ._configuration import DataBoxManagementClientConfiguration +from .operations import Operations +from .operations import JobsOperations +from .operations import DataBoxManagementClientOperationsMixin +from .operations import ServiceOperations +from .. 
import models + + +class DataBoxManagementClient(DataBoxManagementClientOperationsMixin): + """DataBoxManagementClient. + + :ivar operations: Operations operations + :vartype operations: data_box_management_client.aio.operations.Operations + :ivar jobs: JobsOperations operations + :vartype jobs: data_box_management_client.aio.operations.JobsOperations + :ivar service: ServiceOperations operations + :vartype service: data_box_management_client.aio.operations.ServiceOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The Subscription Id. :type subscription_id: str - :param str api_version: API version to use if no profile is provided, or if - missing in profile. :param str base_url: Service URL - :param profile: A profile definition, from KnownProfiles to dict. - :type profile: azure.profiles.KnownProfiles :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. """ - DEFAULT_API_VERSION = '2020-11-01' - _PROFILE_TAG = "azure.mgmt.databox.DataBoxManagementClient" - LATEST_PROFILE = ProfileDefinition({ - _PROFILE_TAG: { - None: DEFAULT_API_VERSION, - }}, - _PROFILE_TAG + " latest" - ) - def __init__( self, - credential, # type: "AsyncTokenCredential" - subscription_id, # type: str - api_version=None, - base_url=None, - profile=KnownProfiles.default, - **kwargs # type: Any + credential: "AsyncTokenCredential", + subscription_id: str, + base_url: Optional[str] = None, + **kwargs: Any ) -> None: if not base_url: base_url = 'https://management.azure.com' self._config = DataBoxManagementClientConfiguration(credential, subscription_id, **kwargs) self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) - super(DataBoxManagementClient, self).__init__( - api_version=api_version, - profile=profile - ) - - @classmethod - def _models_dict(cls, api_version): - return {k: v for k, v in cls.models(api_version).__dict__.items() if isinstance(v, type)} - @classmethod - def models(cls, api_version=DEFAULT_API_VERSION): - """Module depends on the API version: + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) - * 2018-01-01: :mod:`v2018_01_01.models` - * 2019-09-01: :mod:`v2019_09_01.models` - * 2020-04-01: :mod:`v2020_04_01.models` - * 2020-11-01: :mod:`v2020_11_01.models` - """ - if api_version == '2018-01-01': - from ..v2018_01_01 import models - return models - elif api_version == '2019-09-01': - from ..v2019_09_01 import models - return models - elif api_version == '2020-04-01': - from ..v2020_04_01 import models - return models - elif api_version == '2020-11-01': - from ..v2020_11_01 import models - return models - raise ValueError("API version {} is not available".format(api_version)) + self.operations = Operations( + self._client, self._config, self._serialize, self._deserialize) + self.jobs = JobsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.service = ServiceOperations( + self._client, self._config, self._serialize, self._deserialize) - @property - def jobs(self): - """Instance depends on the API version: - - * 2018-01-01: :class:`JobsOperations` - * 2019-09-01: :class:`JobsOperations` - * 2020-04-01: :class:`JobsOperations` - * 2020-11-01: :class:`JobsOperations` - """ - api_version = self._get_api_version('jobs') - 
if api_version == '2018-01-01': - from ..v2018_01_01.aio.operations import JobsOperations as OperationClass - elif api_version == '2019-09-01': - from ..v2019_09_01.aio.operations import JobsOperations as OperationClass - elif api_version == '2020-04-01': - from ..v2020_04_01.aio.operations import JobsOperations as OperationClass - elif api_version == '2020-11-01': - from ..v2020_11_01.aio.operations import JobsOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'jobs'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def operations(self): - """Instance depends on the API version: - - * 2018-01-01: :class:`Operations` - * 2019-09-01: :class:`Operations` - * 2020-04-01: :class:`Operations` - * 2020-11-01: :class:`Operations` - """ - api_version = self._get_api_version('operations') - if api_version == '2018-01-01': - from ..v2018_01_01.aio.operations import Operations as OperationClass - elif api_version == '2019-09-01': - from ..v2019_09_01.aio.operations import Operations as OperationClass - elif api_version == '2020-04-01': - from ..v2020_04_01.aio.operations import Operations as OperationClass - elif api_version == '2020-11-01': - from ..v2020_11_01.aio.operations import Operations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'operations'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - @property - def service(self): - """Instance depends on the API version: - - * 2018-01-01: :class:`ServiceOperations` - * 2019-09-01: :class:`ServiceOperations` - * 2020-04-01: :class:`ServiceOperations` - * 2020-11-01: :class:`ServiceOperations` - """ - api_version = self._get_api_version('service') - if api_version == '2018-01-01': - from ..v2018_01_01.aio.operations import ServiceOperations as OperationClass - elif api_version == '2019-09-01': - from ..v2019_09_01.aio.operations import ServiceOperations as OperationClass - elif api_version == '2020-04-01': - from ..v2020_04_01.aio.operations import ServiceOperations as OperationClass - elif api_version == '2020-11-01': - from ..v2020_11_01.aio.operations import ServiceOperations as OperationClass - else: - raise ValueError("API version {} does not have operation group 'service'".format(api_version)) - return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version))) - - async def close(self): + async def close(self) -> None: await self._client.close() - async def __aenter__(self): + + async def __aenter__(self) -> "DataBoxManagementClient": await self._client.__aenter__() return self - async def __aexit__(self, *exc_details): + + async def __aexit__(self, *exc_details) -> None: await self._client.__aexit__(*exc_details) diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/operations/__init__.py b/src/databox/azext_databox/vendored_sdks/databox/aio/operations/__init__.py similarity index 83% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/operations/__init__.py rename to src/databox/azext_databox/vendored_sdks/databox/aio/operations/__init__.py index 9c8fa7a8253..bd13cc67afb 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/operations/__init__.py +++ 
b/src/databox/azext_databox/vendored_sdks/databox/aio/operations/__init__.py @@ -8,10 +8,12 @@ from ._operations import Operations from ._jobs_operations import JobsOperations +from ._data_box_management_client_operations import DataBoxManagementClientOperationsMixin from ._service_operations import ServiceOperations __all__ = [ 'Operations', 'JobsOperations', + 'DataBoxManagementClientOperationsMixin', 'ServiceOperations', ] diff --git a/src/databox/azext_databox/vendored_sdks/databox/aio/operations/_data_box_management_client_operations.py b/src/databox/azext_databox/vendored_sdks/databox/aio/operations/_data_box_management_client_operations.py new file mode 100644 index 00000000000..f0b0ee29b8e --- /dev/null +++ b/src/databox/azext_databox/vendored_sdks/databox/aio/operations/_data_box_management_client_operations.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class DataBoxManagementClientOperationsMixin: + + async def mitigate( + self, + job_name: str, + resource_group_name: str, + mitigate_job_request: "models.MitigateJobRequest", + **kwargs + ) -> None: + """Request to mitigate for a given job. + + :param job_name: The name of the job Resource within the specified resource group. job names + must be between 3 and 24 characters in length and use any alphanumeric and underscore only. + :type job_name: str + :param resource_group_name: The Resource Group Name. + :type resource_group_name: str + :param mitigate_job_request: Mitigation Request. 
+ :type mitigate_job_request: ~data_box_management_client.models.MitigateJobRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.mitigate.metadata['url'] # type: ignore + path_format_arguments = { + 'jobName': self._serialize.url("job_name", job_name, 'str', max_length=24, min_length=3, pattern=r'^[-\w\.]+$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(mitigate_job_request, 'MitigateJobRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + mitigate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBox/jobs/{jobName}/mitigate'} # type: ignore diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/operations/_jobs_operations.py b/src/databox/azext_databox/vendored_sdks/databox/aio/operations/_jobs_operations.py similarity index 82% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/operations/_jobs_operations.py rename to src/databox/azext_databox/vendored_sdks/databox/aio/operations/_jobs_operations.py index f01a52d0a1a..0d2656c5caa 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/operations/_jobs_operations.py +++ b/src/databox/azext_databox/vendored_sdks/databox/aio/operations/_jobs_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling -from ... import models as _models +from ... import models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -28,14 +28,14 @@ class JobsOperations: instantiates it for you and attaches it as an attribute. 
:ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.databox.models + :type models: ~data_box_management_client.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ - models = _models + models = models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -47,7 +47,7 @@ def list( self, skip_token: Optional[str] = None, **kwargs - ) -> AsyncIterable["_models.JobResourceList"]: + ) -> AsyncIterable["models.JobResourceList"]: """Lists all the jobs available under the subscription. :param skip_token: $skipToken is supported on Get list of jobs, which provides the next page in @@ -55,15 +55,15 @@ def list( :type skip_token: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either JobResourceList or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databox.models.JobResourceList] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_box_management_client.models.JobResourceList] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResourceList"] + cls = kwargs.pop('cls', None) # type: ClsType["models.JobResourceList"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" accept = "application/json" def prepare_request(next_link=None): @@ -105,8 +105,9 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -115,12 +116,77 @@ async def get_next(next_link=None): ) list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataBox/jobs'} # type: ignore + async def mark_devices_shipped( + self, + job_name: str, + resource_group_name: str, + mark_devices_shipped_request: "models.MarkDevicesShippedRequest", + **kwargs + ) -> None: + """Request to mark devices for a given job as shipped. + + :param job_name: The name of the job Resource within the specified resource group. job names + must be between 3 and 24 characters in length and use any alphanumeric and underscore only. + :type job_name: str + :param resource_group_name: The Resource Group Name. + :type resource_group_name: str + :param mark_devices_shipped_request: Mark Devices Shipped Request. 
+ :type mark_devices_shipped_request: ~data_box_management_client.models.MarkDevicesShippedRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.mark_devices_shipped.metadata['url'] # type: ignore + path_format_arguments = { + 'jobName': self._serialize.url("job_name", job_name, 'str', max_length=24, min_length=3, pattern=r'^[-\w\.]+$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(mark_devices_shipped_request, 'MarkDevicesShippedRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + mark_devices_shipped.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBox/jobs/{jobName}/markDevicesShipped'} # type: ignore + def list_by_resource_group( self, resource_group_name: str, skip_token: Optional[str] = None, **kwargs - ) -> AsyncIterable["_models.JobResourceList"]: + ) -> AsyncIterable["models.JobResourceList"]: """Lists all the jobs available under the given resource group. :param resource_group_name: The Resource Group Name. 
@@ -130,15 +196,15 @@ def list_by_resource_group( :type skip_token: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either JobResourceList or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databox.models.JobResourceList] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_box_management_client.models.JobResourceList] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResourceList"] + cls = kwargs.pop('cls', None) # type: ClsType["models.JobResourceList"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" accept = "application/json" def prepare_request(next_link=None): @@ -181,8 +247,9 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -197,7 +264,7 @@ async def get( job_name: str, expand: Optional[str] = None, **kwargs - ) -> "_models.JobResource": + ) -> "models.JobResource": """Gets information about the specified job. :param resource_group_name: The Resource Group Name. @@ -210,15 +277,15 @@ async def get( :type expand: str :keyword callable cls: A custom type or function that will be passed the direct response :return: JobResource, or the result of cls(response) - :rtype: ~azure.mgmt.databox.models.JobResource + :rtype: ~data_box_management_client.models.JobResource :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResource"] + cls = kwargs.pop('cls', None) # type: ClsType["models.JobResource"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" accept = "application/json" # Construct URL @@ -246,7 +313,8 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('JobResource', pipeline_response) @@ -260,15 +328,15 @@ async def _create_initial( self, resource_group_name: str, job_name: str, - job_resource: "_models.JobResource", + job_resource: "models.JobResource", **kwargs - ) -> Optional["_models.JobResource"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.JobResource"]] + ) -> Optional["models.JobResource"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.JobResource"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = 
"application/json" @@ -299,7 +367,8 @@ async def _create_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: @@ -315,9 +384,9 @@ async def begin_create( self, resource_group_name: str, job_name: str, - job_resource: "_models.JobResource", + job_resource: "models.JobResource", **kwargs - ) -> AsyncLROPoller["_models.JobResource"]: + ) -> AsyncLROPoller["models.JobResource"]: """Creates a new job with the specified parameters. Existing job cannot be updated with this API and should instead be updated with the Update job API. @@ -327,7 +396,7 @@ async def begin_create( must be between 3 and 24 characters in length and use any alphanumeric and underscore only. :type job_name: str :param job_resource: Job details from request body. - :type job_resource: ~azure.mgmt.databox.models.JobResource + :type job_resource: ~data_box_management_client.models.JobResource :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a @@ -335,11 +404,11 @@ async def begin_create( :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either JobResource or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databox.models.JobResource] + :rtype: ~azure.core.polling.AsyncLROPoller[~data_box_management_client.models.JobResource] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResource"] + cls = kwargs.pop('cls', None) # type: ClsType["models.JobResource"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -395,7 +464,7 @@ async def _delete_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" accept = "application/json" # Construct URL @@ -419,9 +488,10 @@ async def _delete_initial( pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [202, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) @@ -497,16 +567,16 @@ async def _update_initial( self, resource_group_name: str, job_name: str, - job_resource_update_parameter: "_models.JobResourceUpdateParameter", + job_resource_update_parameter: "models.JobResourceUpdateParameter", if_match: Optional[str] = None, **kwargs - ) -> 
Optional["_models.JobResource"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.JobResource"]] + ) -> Optional["models.JobResource"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.JobResource"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -539,7 +609,8 @@ async def _update_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: @@ -555,10 +626,10 @@ async def begin_update( self, resource_group_name: str, job_name: str, - job_resource_update_parameter: "_models.JobResourceUpdateParameter", + job_resource_update_parameter: "models.JobResourceUpdateParameter", if_match: Optional[str] = None, **kwargs - ) -> AsyncLROPoller["_models.JobResource"]: + ) -> AsyncLROPoller["models.JobResource"]: """Updates the properties of an existing job. :param resource_group_name: The Resource Group Name. @@ -567,7 +638,7 @@ async def begin_update( must be between 3 and 24 characters in length and use any alphanumeric and underscore only. :type job_name: str :param job_resource_update_parameter: Job update parameters from request body. - :type job_resource_update_parameter: ~azure.mgmt.databox.models.JobResourceUpdateParameter + :type job_resource_update_parameter: ~data_box_management_client.models.JobResourceUpdateParameter :param if_match: Defines the If-Match condition. The patch will be performed only if the ETag of the job on the server matches this value. :type if_match: str @@ -578,11 +649,11 @@ async def begin_update( :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either JobResource or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databox.models.JobResource] + :rtype: ~azure.core.polling.AsyncLROPoller[~data_box_management_client.models.JobResource] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResource"] + cls = kwargs.pop('cls', None) # type: ClsType["models.JobResource"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -632,9 +703,9 @@ async def book_shipment_pick_up( self, resource_group_name: str, job_name: str, - shipment_pick_up_request: "_models.ShipmentPickUpRequest", + shipment_pick_up_request: "models.ShipmentPickUpRequest", **kwargs - ) -> "_models.ShipmentPickUpResponse": + ) -> "models.ShipmentPickUpResponse": """Book shipment pick up. :param resource_group_name: The Resource Group Name. @@ -643,18 +714,18 @@ async def book_shipment_pick_up( must be between 3 and 24 characters in length and use any alphanumeric and underscore only. :type job_name: str :param shipment_pick_up_request: Details of shipment pick up request. 
- :type shipment_pick_up_request: ~azure.mgmt.databox.models.ShipmentPickUpRequest + :type shipment_pick_up_request: ~data_box_management_client.models.ShipmentPickUpRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ShipmentPickUpResponse, or the result of cls(response) - :rtype: ~azure.mgmt.databox.models.ShipmentPickUpResponse + :rtype: ~data_box_management_client.models.ShipmentPickUpResponse :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ShipmentPickUpResponse"] + cls = kwargs.pop('cls', None) # type: ClsType["models.ShipmentPickUpResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -685,7 +756,8 @@ async def book_shipment_pick_up( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ShipmentPickUpResponse', pipeline_response) @@ -699,7 +771,7 @@ async def cancel( self, resource_group_name: str, job_name: str, - cancellation_reason: "_models.CancellationReason", + cancellation_reason: "models.CancellationReason", **kwargs ) -> None: """CancelJob. @@ -710,7 +782,7 @@ async def cancel( must be between 3 and 24 characters in length and use any alphanumeric and underscore only. :type job_name: str :param cancellation_reason: Reason for cancellation. - :type cancellation_reason: ~azure.mgmt.databox.models.CancellationReason + :type cancellation_reason: ~data_box_management_client.models.CancellationReason :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None @@ -721,7 +793,7 @@ async def cancel( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -752,7 +824,8 @@ async def cancel( if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) @@ -764,7 +837,7 @@ def list_credentials( resource_group_name: str, job_name: str, **kwargs - ) -> AsyncIterable["_models.UnencryptedCredentialsList"]: + ) -> AsyncIterable["models.UnencryptedCredentialsList"]: """This method gets the unencrypted secrets related to the job. :param resource_group_name: The Resource Group Name. 
@@ -774,15 +847,15 @@ def list_credentials( :type job_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either UnencryptedCredentialsList or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databox.models.UnencryptedCredentialsList] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_box_management_client.models.UnencryptedCredentialsList] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.UnencryptedCredentialsList"] + cls = kwargs.pop('cls', None) # type: ClsType["models.UnencryptedCredentialsList"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" accept = "application/json" def prepare_request(next_link=None): @@ -824,8 +897,9 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/operations/_operations.py b/src/databox/azext_databox/vendored_sdks/databox/aio/operations/_operations.py similarity index 88% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/operations/_operations.py rename to src/databox/azext_databox/vendored_sdks/databox/aio/operations/_operations.py index bd99838be8c..d684839ee5f 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/operations/_operations.py +++ b/src/databox/azext_databox/vendored_sdks/databox/aio/operations/_operations.py @@ -14,7 +14,7 @@ from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat -from ... import models as _models +from ... import models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -26,14 +26,14 @@ class Operations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.databox.models + :type models: ~data_box_management_client.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ - models = _models + models = models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -44,20 +44,20 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, **kwargs - ) -> AsyncIterable["_models.OperationList"]: + ) -> AsyncIterable["models.OperationList"]: """This method gets all the operations. 
:keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either OperationList or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databox.models.OperationList] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_box_management_client.models.OperationList] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationList"] + cls = kwargs.pop('cls', None) # type: ClsType["models.OperationList"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" accept = "application/json" def prepare_request(next_link=None): @@ -93,8 +93,9 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/operations/_service_operations.py b/src/databox/azext_databox/vendored_sdks/databox/aio/operations/_service_operations.py similarity index 73% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/operations/_service_operations.py rename to src/databox/azext_databox/vendored_sdks/databox/aio/operations/_service_operations.py index a07ce55b508..301bbfddf8b 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/operations/_service_operations.py +++ b/src/databox/azext_databox/vendored_sdks/databox/aio/operations/_service_operations.py @@ -14,7 +14,7 @@ from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat -from ... import models as _models +from ... import models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] @@ -26,14 +26,14 @@ class ServiceOperations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.databox.models + :type models: ~data_box_management_client.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ - models = _models + models = models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client @@ -41,94 +41,13 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config - def list_available_skus( - self, - location: str, - available_sku_request: "_models.AvailableSkuRequest", - **kwargs - ) -> AsyncIterable["_models.AvailableSkusResult"]: - """This method provides the list of available skus for the given subscription and location. - - :param location: The location of the resource. - :type location: str - :param available_sku_request: Filters for showing the available skus. 
- :type available_sku_request: ~azure.mgmt.databox.models.AvailableSkuRequest - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either AvailableSkusResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databox.models.AvailableSkusResult] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.AvailableSkusResult"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" - content_type = "application/json" - accept = "application/json" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - if not next_link: - # Construct URL - url = self.list_available_skus.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'location': self._serialize.url("location", location, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(available_sku_request, 'AvailableSkuRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(available_sku_request, 'AvailableSkuRequest') - body_content_kwargs['content'] = body_content - request = self._client.get(url, query_parameters, header_parameters, **body_content_kwargs) - return request - - async def extract_data(pipeline_response): - deserialized = self._deserialize('AvailableSkusResult', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return AsyncItemPaged( - get_next, extract_data - ) - list_available_skus.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataBox/locations/{location}/availableSkus'} # type: ignore - def list_available_skus_by_resource_group( self, resource_group_name: str, location: str, - available_sku_request: "_models.AvailableSkuRequest", + available_sku_request: "models.AvailableSkuRequest", **kwargs - ) -> AsyncIterable["_models.AvailableSkusResult"]: + ) -> AsyncIterable["models.AvailableSkusResult"]: """This method provides the list of available skus for the given subscription, resource 
group and location. @@ -137,18 +56,18 @@ def list_available_skus_by_resource_group( :param location: The location of the resource. :type location: str :param available_sku_request: Filters for showing the available skus. - :type available_sku_request: ~azure.mgmt.databox.models.AvailableSkuRequest + :type available_sku_request: ~data_box_management_client.models.AvailableSkuRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either AvailableSkusResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databox.models.AvailableSkusResult] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~data_box_management_client.models.AvailableSkusResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.AvailableSkusResult"] + cls = kwargs.pop('cls', None) # type: ClsType["models.AvailableSkusResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" content_type = "application/json" accept = "application/json" @@ -198,8 +117,9 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -211,27 +131,27 @@ async def get_next(next_link=None): async def validate_address( self, location: str, - validate_address: "_models.ValidateAddress", + validate_address: "models.ValidateAddress", **kwargs - ) -> "_models.AddressValidationOutput": - """[DEPRECATED NOTICE: This operation will soon be removed] This method validates the customer + ) -> "models.AddressValidationOutput": + """[DEPRECATED NOTICE: This operation will soon be removed]. This method validates the customer shipping address and provide alternate addresses if any. :param location: The location of the resource. :type location: str :param validate_address: Shipping address of the customer. 
- :type validate_address: ~azure.mgmt.databox.models.ValidateAddress + :type validate_address: ~data_box_management_client.models.ValidateAddress :keyword callable cls: A custom type or function that will be passed the direct response :return: AddressValidationOutput, or the result of cls(response) - :rtype: ~azure.mgmt.databox.models.AddressValidationOutput + :rtype: ~data_box_management_client.models.AddressValidationOutput :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.AddressValidationOutput"] + cls = kwargs.pop('cls', None) # type: ClsType["models.AddressValidationOutput"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -261,7 +181,8 @@ async def validate_address( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('AddressValidationOutput', pipeline_response) @@ -275,9 +196,9 @@ async def validate_inputs_by_resource_group( self, resource_group_name: str, location: str, - validation_request: "_models.ValidationRequest", + validation_request: "models.ValidationRequest", **kwargs - ) -> "_models.ValidationResponse": + ) -> "models.ValidationResponse": """This method does all necessary pre-job creation validation under resource group. :param resource_group_name: The Resource Group Name. @@ -285,18 +206,18 @@ async def validate_inputs_by_resource_group( :param location: The location of the resource. :type location: str :param validation_request: Inputs of the customer. 
- :type validation_request: ~azure.mgmt.databox.models.ValidationRequest + :type validation_request: ~data_box_management_client.models.ValidationRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ValidationResponse, or the result of cls(response) - :rtype: ~azure.mgmt.databox.models.ValidationResponse + :rtype: ~data_box_management_client.models.ValidationResponse :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ValidationResponse"] + cls = kwargs.pop('cls', None) # type: ClsType["models.ValidationResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -327,7 +248,8 @@ async def validate_inputs_by_resource_group( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ValidationResponse', pipeline_response) @@ -340,26 +262,26 @@ async def validate_inputs_by_resource_group( async def validate_inputs( self, location: str, - validation_request: "_models.ValidationRequest", + validation_request: "models.ValidationRequest", **kwargs - ) -> "_models.ValidationResponse": + ) -> "models.ValidationResponse": """This method does all necessary pre-job creation validation under subscription. :param location: The location of the resource. :type location: str :param validation_request: Inputs of the customer. 
- :type validation_request: ~azure.mgmt.databox.models.ValidationRequest + :type validation_request: ~data_box_management_client.models.ValidationRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ValidationResponse, or the result of cls(response) - :rtype: ~azure.mgmt.databox.models.ValidationResponse + :rtype: ~data_box_management_client.models.ValidationResponse :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ValidationResponse"] + cls = kwargs.pop('cls', None) # type: ClsType["models.ValidationResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -389,7 +311,8 @@ async def validate_inputs( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ValidationResponse', pipeline_response) @@ -402,26 +325,27 @@ async def validate_inputs( async def region_configuration( self, location: str, - region_configuration_request: "_models.RegionConfigurationRequest", + region_configuration_request: "models.RegionConfigurationRequest", **kwargs - ) -> "_models.RegionConfigurationResponse": - """This API provides configuration details specific to given region/location. + ) -> "models.RegionConfigurationResponse": + """This API provides configuration details specific to given region/location at Subscription + level. :param location: The location of the resource. :type location: str :param region_configuration_request: Request body to get the configuration for the region. 
- :type region_configuration_request: ~azure.mgmt.databox.models.RegionConfigurationRequest + :type region_configuration_request: ~data_box_management_client.models.RegionConfigurationRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: RegionConfigurationResponse, or the result of cls(response) - :rtype: ~azure.mgmt.databox.models.RegionConfigurationResponse + :rtype: ~data_box_management_client.models.RegionConfigurationResponse :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.RegionConfigurationResponse"] + cls = kwargs.pop('cls', None) # type: ClsType["models.RegionConfigurationResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -451,7 +375,8 @@ async def region_configuration( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('RegionConfigurationResponse', pipeline_response) @@ -460,3 +385,72 @@ async def region_configuration( return deserialized region_configuration.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataBox/locations/{location}/regionConfiguration'} # type: ignore + + async def region_configuration_by_resource_group( + self, + resource_group_name: str, + location: str, + region_configuration_request: "models.RegionConfigurationRequest", + **kwargs + ) -> "models.RegionConfigurationResponse": + """This API provides configuration details specific to given region/location at Resource group + level. + + :param resource_group_name: The Resource Group Name. + :type resource_group_name: str + :param location: The location of the resource. + :type location: str + :param region_configuration_request: Request body to get the configuration for the region at + resource group level. 
+ :type region_configuration_request: ~data_box_management_client.models.RegionConfigurationRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: RegionConfigurationResponse, or the result of cls(response) + :rtype: ~data_box_management_client.models.RegionConfigurationResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.RegionConfigurationResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.region_configuration_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'location': self._serialize.url("location", location, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(region_configuration_request, 'RegionConfigurationRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('RegionConfigurationResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + region_configuration_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBox/locations/{location}/regionConfiguration'} # type: ignore diff --git a/src/databox/azext_databox/vendored_sdks/databox/models.py b/src/databox/azext_databox/vendored_sdks/databox/models.py deleted file mode 100644 index 1d5d79558e5..00000000000 --- a/src/databox/azext_databox/vendored_sdks/databox/models.py +++ /dev/null @@ -1,7 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -------------------------------------------------------------------------- -from .v2019_09_01.models import * diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/models/__init__.py b/src/databox/azext_databox/vendored_sdks/databox/models/__init__.py similarity index 65% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/models/__init__.py rename to src/databox/azext_databox/vendored_sdks/databox/models/__init__.py index 84b34d519a6..762c3277b1c 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/models/__init__.py +++ b/src/databox/azext_databox/vendored_sdks/databox/models/__init__.py @@ -8,12 +8,16 @@ try: from ._models_py3 import AccountCredentialDetails + from ._models_py3 import AdditionalErrorInfo from ._models_py3 import AddressValidationOutput from ._models_py3 import AddressValidationProperties + from ._models_py3 import ApiError from ._models_py3 import ApplianceNetworkConfiguration from ._models_py3 import ArmBaseObject from ._models_py3 import AvailableSkuRequest from ._models_py3 import AvailableSkusResult + from ._models_py3 import AzureFileFilterDetails + from ._models_py3 import BlobFilterDetails from ._models_py3 import CancellationReason from ._models_py3 import CloudError from ._models_py3 import ContactDetails @@ -22,7 +26,12 @@ from ._models_py3 import CreateJobValidations from ._models_py3 import CreateOrderLimitForSubscriptionValidationRequest from ._models_py3 import CreateOrderLimitForSubscriptionValidationResponseProperties + from ._models_py3 import CustomerDiskJobSecrets + from ._models_py3 import DataAccountDetails from ._models_py3 import DataBoxAccountCopyLogDetails + from ._models_py3 import DataBoxCustomerDiskCopyLogDetails + from ._models_py3 import DataBoxCustomerDiskCopyProgress + from ._models_py3 import DataBoxCustomerDiskJobDetails from ._models_py3 import DataBoxDiskCopyLogDetails from ._models_py3 import DataBoxDiskCopyProgress from ._models_py3 import DataBoxDiskJobDetails @@ -34,30 +43,45 @@ from ._models_py3 import DataBoxJobDetails from ._models_py3 import DataBoxScheduleAvailabilityRequest from ._models_py3 import DataBoxSecret - from ._models_py3 import DataDestinationDetailsValidationRequest - from ._models_py3 import DataDestinationDetailsValidationResponseProperties + from ._models_py3 import DataExportDetails + from ._models_py3 import DataImportDetails + from ._models_py3 import DataLocationToServiceLocationMap + from ._models_py3 import DataTransferDetailsValidationRequest + from ._models_py3 import DataTransferDetailsValidationResponseProperties from ._models_py3 import DataboxJobSecrets + from ._models_py3 import DatacenterAddressInstructionResponse + from ._models_py3 import DatacenterAddressLocationResponse + from ._models_py3 import DatacenterAddressRequest + from ._models_py3 import DatacenterAddressResponse from ._models_py3 import DcAccessSecurityCode - from ._models_py3 import DestinationAccountDetails - from ._models_py3 import DestinationManagedDiskDetails - from ._models_py3 import DestinationStorageAccountDetails - from ._models_py3 import DestinationToServiceLocationMap + from ._models_py3 import Details from ._models_py3 import DiskScheduleAvailabilityRequest from ._models_py3 import DiskSecret - from ._models_py3 import Error + from ._models_py3 import EncryptionPreferences + from ._models_py3 import ErrorDetail + from ._models_py3 import ExportDiskDetails + from ._models_py3 import FilterFileDetails from ._models_py3 import HeavyScheduleAvailabilityRequest 
+ from ._models_py3 import IdentityProperties + from ._models_py3 import ImportDiskDetails from ._models_py3 import JobDeliveryInfo from ._models_py3 import JobDetails - from ._models_py3 import JobErrorDetails from ._models_py3 import JobResource from ._models_py3 import JobResourceList from ._models_py3 import JobResourceUpdateParameter from ._models_py3 import JobSecrets from ._models_py3 import JobStages + from ._models_py3 import KeyEncryptionKey + from ._models_py3 import LastMitigationActionOnJob + from ._models_py3 import ManagedDiskDetails + from ._models_py3 import MarkDevicesShippedRequest + from ._models_py3 import MitigateJobRequest from ._models_py3 import NotificationPreference from ._models_py3 import Operation from ._models_py3 import OperationDisplay from ._models_py3 import OperationList + from ._models_py3 import PackageCarrierDetails + from ._models_py3 import PackageCarrierInfo from ._models_py3 import PackageShippingDetails from ._models_py3 import Preferences from ._models_py3 import PreferencesValidationRequest @@ -65,6 +89,7 @@ from ._models_py3 import RegionConfigurationRequest from ._models_py3 import RegionConfigurationResponse from ._models_py3 import Resource + from ._models_py3 import ResourceIdentity from ._models_py3 import ScheduleAvailabilityRequest from ._models_py3 import ScheduleAvailabilityResponse from ._models_py3 import ShareCredentialDetails @@ -77,8 +102,15 @@ from ._models_py3 import SkuCapacity from ._models_py3 import SkuCost from ._models_py3 import SkuInformation + from ._models_py3 import StorageAccountDetails from ._models_py3 import SubscriptionIsAllowedToCreateJobValidationRequest from ._models_py3 import SubscriptionIsAllowedToCreateJobValidationResponseProperties + from ._models_py3 import SystemData + from ._models_py3 import TransferAllDetails + from ._models_py3 import TransferConfiguration + from ._models_py3 import TransferConfigurationTransferAllDetails + from ._models_py3 import TransferConfigurationTransferFilterDetails + from ._models_py3 import TransferFilterDetails from ._models_py3 import TransportAvailabilityDetails from ._models_py3 import TransportAvailabilityRequest from ._models_py3 import TransportAvailabilityResponse @@ -86,6 +118,8 @@ from ._models_py3 import UnencryptedCredentials from ._models_py3 import UnencryptedCredentialsList from ._models_py3 import UpdateJobDetails + from ._models_py3 import UserAssignedIdentity + from ._models_py3 import UserAssignedProperties from ._models_py3 import ValidateAddress from ._models_py3 import ValidationInputRequest from ._models_py3 import ValidationInputResponse @@ -93,12 +127,16 @@ from ._models_py3 import ValidationResponse except (SyntaxError, ImportError): from ._models import AccountCredentialDetails # type: ignore + from ._models import AdditionalErrorInfo # type: ignore from ._models import AddressValidationOutput # type: ignore from ._models import AddressValidationProperties # type: ignore + from ._models import ApiError # type: ignore from ._models import ApplianceNetworkConfiguration # type: ignore from ._models import ArmBaseObject # type: ignore from ._models import AvailableSkuRequest # type: ignore from ._models import AvailableSkusResult # type: ignore + from ._models import AzureFileFilterDetails # type: ignore + from ._models import BlobFilterDetails # type: ignore from ._models import CancellationReason # type: ignore from ._models import CloudError # type: ignore from ._models import ContactDetails # type: ignore @@ -107,7 +145,12 @@ from ._models 
import CreateJobValidations # type: ignore from ._models import CreateOrderLimitForSubscriptionValidationRequest # type: ignore from ._models import CreateOrderLimitForSubscriptionValidationResponseProperties # type: ignore + from ._models import CustomerDiskJobSecrets # type: ignore + from ._models import DataAccountDetails # type: ignore from ._models import DataBoxAccountCopyLogDetails # type: ignore + from ._models import DataBoxCustomerDiskCopyLogDetails # type: ignore + from ._models import DataBoxCustomerDiskCopyProgress # type: ignore + from ._models import DataBoxCustomerDiskJobDetails # type: ignore from ._models import DataBoxDiskCopyLogDetails # type: ignore from ._models import DataBoxDiskCopyProgress # type: ignore from ._models import DataBoxDiskJobDetails # type: ignore @@ -119,30 +162,45 @@ from ._models import DataBoxJobDetails # type: ignore from ._models import DataBoxScheduleAvailabilityRequest # type: ignore from ._models import DataBoxSecret # type: ignore - from ._models import DataDestinationDetailsValidationRequest # type: ignore - from ._models import DataDestinationDetailsValidationResponseProperties # type: ignore + from ._models import DataExportDetails # type: ignore + from ._models import DataImportDetails # type: ignore + from ._models import DataLocationToServiceLocationMap # type: ignore + from ._models import DataTransferDetailsValidationRequest # type: ignore + from ._models import DataTransferDetailsValidationResponseProperties # type: ignore from ._models import DataboxJobSecrets # type: ignore + from ._models import DatacenterAddressInstructionResponse # type: ignore + from ._models import DatacenterAddressLocationResponse # type: ignore + from ._models import DatacenterAddressRequest # type: ignore + from ._models import DatacenterAddressResponse # type: ignore from ._models import DcAccessSecurityCode # type: ignore - from ._models import DestinationAccountDetails # type: ignore - from ._models import DestinationManagedDiskDetails # type: ignore - from ._models import DestinationStorageAccountDetails # type: ignore - from ._models import DestinationToServiceLocationMap # type: ignore + from ._models import Details # type: ignore from ._models import DiskScheduleAvailabilityRequest # type: ignore from ._models import DiskSecret # type: ignore - from ._models import Error # type: ignore + from ._models import EncryptionPreferences # type: ignore + from ._models import ErrorDetail # type: ignore + from ._models import ExportDiskDetails # type: ignore + from ._models import FilterFileDetails # type: ignore from ._models import HeavyScheduleAvailabilityRequest # type: ignore + from ._models import IdentityProperties # type: ignore + from ._models import ImportDiskDetails # type: ignore from ._models import JobDeliveryInfo # type: ignore from ._models import JobDetails # type: ignore - from ._models import JobErrorDetails # type: ignore from ._models import JobResource # type: ignore from ._models import JobResourceList # type: ignore from ._models import JobResourceUpdateParameter # type: ignore from ._models import JobSecrets # type: ignore from ._models import JobStages # type: ignore + from ._models import KeyEncryptionKey # type: ignore + from ._models import LastMitigationActionOnJob # type: ignore + from ._models import ManagedDiskDetails # type: ignore + from ._models import MarkDevicesShippedRequest # type: ignore + from ._models import MitigateJobRequest # type: ignore from ._models import NotificationPreference # type: ignore from ._models 
import Operation # type: ignore from ._models import OperationDisplay # type: ignore from ._models import OperationList # type: ignore + from ._models import PackageCarrierDetails # type: ignore + from ._models import PackageCarrierInfo # type: ignore from ._models import PackageShippingDetails # type: ignore from ._models import Preferences # type: ignore from ._models import PreferencesValidationRequest # type: ignore @@ -150,6 +208,7 @@ from ._models import RegionConfigurationRequest # type: ignore from ._models import RegionConfigurationResponse # type: ignore from ._models import Resource # type: ignore + from ._models import ResourceIdentity # type: ignore from ._models import ScheduleAvailabilityRequest # type: ignore from ._models import ScheduleAvailabilityResponse # type: ignore from ._models import ShareCredentialDetails # type: ignore @@ -162,8 +221,15 @@ from ._models import SkuCapacity # type: ignore from ._models import SkuCost # type: ignore from ._models import SkuInformation # type: ignore + from ._models import StorageAccountDetails # type: ignore from ._models import SubscriptionIsAllowedToCreateJobValidationRequest # type: ignore from ._models import SubscriptionIsAllowedToCreateJobValidationResponseProperties # type: ignore + from ._models import SystemData # type: ignore + from ._models import TransferAllDetails # type: ignore + from ._models import TransferConfiguration # type: ignore + from ._models import TransferConfigurationTransferAllDetails # type: ignore + from ._models import TransferConfigurationTransferFilterDetails # type: ignore + from ._models import TransferFilterDetails # type: ignore from ._models import TransportAvailabilityDetails # type: ignore from ._models import TransportAvailabilityRequest # type: ignore from ._models import TransportAvailabilityResponse # type: ignore @@ -171,6 +237,8 @@ from ._models import UnencryptedCredentials # type: ignore from ._models import UnencryptedCredentialsList # type: ignore from ._models import UpdateJobDetails # type: ignore + from ._models import UserAssignedIdentity # type: ignore + from ._models import UserAssignedProperties # type: ignore from ._models import ValidateAddress # type: ignore from ._models import ValidationInputRequest # type: ignore from ._models import ValidationInputResponse # type: ignore @@ -183,8 +251,14 @@ AddressValidationStatus, ClassDiscriminator, CopyStatus, - DataDestinationType, + CustomerResolutionCode, + DataAccountType, + DatacenterAddressType, + DoubleEncryption, + FilterFileType, JobDeliveryType, + KekType, + LogCollectionLevel, NotificationStageName, OverallValidationStatus, ShareDestinationFormatType, @@ -192,6 +266,8 @@ SkuName, StageName, StageStatus, + TransferConfigurationType, + TransferType, TransportShipmentTypes, ValidationInputDiscriminator, ValidationStatus, @@ -199,12 +275,16 @@ __all__ = [ 'AccountCredentialDetails', + 'AdditionalErrorInfo', 'AddressValidationOutput', 'AddressValidationProperties', + 'ApiError', 'ApplianceNetworkConfiguration', 'ArmBaseObject', 'AvailableSkuRequest', 'AvailableSkusResult', + 'AzureFileFilterDetails', + 'BlobFilterDetails', 'CancellationReason', 'CloudError', 'ContactDetails', @@ -213,7 +293,12 @@ 'CreateJobValidations', 'CreateOrderLimitForSubscriptionValidationRequest', 'CreateOrderLimitForSubscriptionValidationResponseProperties', + 'CustomerDiskJobSecrets', + 'DataAccountDetails', 'DataBoxAccountCopyLogDetails', + 'DataBoxCustomerDiskCopyLogDetails', + 'DataBoxCustomerDiskCopyProgress', + 'DataBoxCustomerDiskJobDetails', 
'DataBoxDiskCopyLogDetails', 'DataBoxDiskCopyProgress', 'DataBoxDiskJobDetails', @@ -225,30 +310,45 @@ 'DataBoxJobDetails', 'DataBoxScheduleAvailabilityRequest', 'DataBoxSecret', - 'DataDestinationDetailsValidationRequest', - 'DataDestinationDetailsValidationResponseProperties', + 'DataExportDetails', + 'DataImportDetails', + 'DataLocationToServiceLocationMap', + 'DataTransferDetailsValidationRequest', + 'DataTransferDetailsValidationResponseProperties', 'DataboxJobSecrets', + 'DatacenterAddressInstructionResponse', + 'DatacenterAddressLocationResponse', + 'DatacenterAddressRequest', + 'DatacenterAddressResponse', 'DcAccessSecurityCode', - 'DestinationAccountDetails', - 'DestinationManagedDiskDetails', - 'DestinationStorageAccountDetails', - 'DestinationToServiceLocationMap', + 'Details', 'DiskScheduleAvailabilityRequest', 'DiskSecret', - 'Error', + 'EncryptionPreferences', + 'ErrorDetail', + 'ExportDiskDetails', + 'FilterFileDetails', 'HeavyScheduleAvailabilityRequest', + 'IdentityProperties', + 'ImportDiskDetails', 'JobDeliveryInfo', 'JobDetails', - 'JobErrorDetails', 'JobResource', 'JobResourceList', 'JobResourceUpdateParameter', 'JobSecrets', 'JobStages', + 'KeyEncryptionKey', + 'LastMitigationActionOnJob', + 'ManagedDiskDetails', + 'MarkDevicesShippedRequest', + 'MitigateJobRequest', 'NotificationPreference', 'Operation', 'OperationDisplay', 'OperationList', + 'PackageCarrierDetails', + 'PackageCarrierInfo', 'PackageShippingDetails', 'Preferences', 'PreferencesValidationRequest', @@ -256,6 +356,7 @@ 'RegionConfigurationRequest', 'RegionConfigurationResponse', 'Resource', + 'ResourceIdentity', 'ScheduleAvailabilityRequest', 'ScheduleAvailabilityResponse', 'ShareCredentialDetails', @@ -268,8 +369,15 @@ 'SkuCapacity', 'SkuCost', 'SkuInformation', + 'StorageAccountDetails', 'SubscriptionIsAllowedToCreateJobValidationRequest', 'SubscriptionIsAllowedToCreateJobValidationResponseProperties', + 'SystemData', + 'TransferAllDetails', + 'TransferConfiguration', + 'TransferConfigurationTransferAllDetails', + 'TransferConfigurationTransferFilterDetails', + 'TransferFilterDetails', 'TransportAvailabilityDetails', 'TransportAvailabilityRequest', 'TransportAvailabilityResponse', @@ -277,6 +385,8 @@ 'UnencryptedCredentials', 'UnencryptedCredentialsList', 'UpdateJobDetails', + 'UserAssignedIdentity', + 'UserAssignedProperties', 'ValidateAddress', 'ValidationInputRequest', 'ValidationInputResponse', @@ -287,8 +397,14 @@ 'AddressValidationStatus', 'ClassDiscriminator', 'CopyStatus', - 'DataDestinationType', + 'CustomerResolutionCode', + 'DataAccountType', + 'DatacenterAddressType', + 'DoubleEncryption', + 'FilterFileType', 'JobDeliveryType', + 'KekType', + 'LogCollectionLevel', 'NotificationStageName', 'OverallValidationStatus', 'ShareDestinationFormatType', @@ -296,6 +412,8 @@ 'SkuName', 'StageName', 'StageStatus', + 'TransferConfigurationType', + 'TransferType', 'TransportShipmentTypes', 'ValidationInputDiscriminator', 'ValidationStatus', diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/models/_data_box_management_client_enums.py b/src/databox/azext_databox/vendored_sdks/databox/models/_data_box_management_client_enums.py similarity index 68% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/models/_data_box_management_client_enums.py rename to src/databox/azext_databox/vendored_sdks/databox/models/_data_box_management_client_enums.py index e3da769b08c..7df2fe659e1 100644 --- 
a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/models/_data_box_management_client_enums.py +++ b/src/databox/azext_databox/vendored_sdks/databox/models/_data_box_management_client_enums.py @@ -51,9 +51,10 @@ class ClassDiscriminator(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Indicates the type of job details. """ - DATA_BOX = "DataBox" #: Databox orders. - DATA_BOX_DISK = "DataBoxDisk" #: DataboxDisk orders. - DATA_BOX_HEAVY = "DataBoxHeavy" #: DataboxHeavy orders. + DATA_BOX = "DataBox" #: Data Box orders. + DATA_BOX_DISK = "DataBoxDisk" #: Data Box Disk orders. + DATA_BOX_HEAVY = "DataBoxHeavy" #: Data Box Heavy orders. + DATA_BOX_CUSTOMER_DISK = "DataBoxCustomerDisk" #: Data Box Customer Disk orders. class CopyStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The Status of the copy @@ -71,13 +72,40 @@ class CopyStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): STORAGE_ACCOUNT_NOT_ACCESSIBLE = "StorageAccountNotAccessible" #: Data copy failed. Storage Account was not accessible during copy. UNSUPPORTED_DATA = "UnsupportedData" #: Data copy failed. The Device data content is not supported. -class DataDestinationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """Data Destination Type. +class CustomerResolutionCode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + NONE = "None" #: No Resolution Yet. + MOVE_TO_CLEAN_UP_DEVICE = "MoveToCleanUpDevice" #: Clean the device. + RESUME = "Resume" #: Resume the job to same stage. + +class DataAccountType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Type of the account. """ STORAGE_ACCOUNT = "StorageAccount" #: Storage Accounts . MANAGED_DISK = "ManagedDisk" #: Azure Managed disk storage. +class DatacenterAddressType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Data center address type + """ + + DATACENTER_ADDRESS_LOCATION = "DatacenterAddressLocation" #: Data center address location. + DATACENTER_ADDRESS_INSTRUCTION = "DatacenterAddressInstruction" #: Data center address instruction. + +class DoubleEncryption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Defines secondary layer of software-based encryption enablement. + """ + + ENABLED = "Enabled" #: Software-based encryption is enabled. + DISABLED = "Disabled" #: Software-based encryption is disabled. + +class FilterFileType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Type of the filter file. + """ + + AZURE_BLOB = "AzureBlob" #: Filter file is of the type AzureBlob. + AZURE_FILE = "AzureFile" #: Filter file is of the type AzureFiles. + class JobDeliveryType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Delivery type of Job. """ @@ -85,6 +113,20 @@ class JobDeliveryType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): NON_SCHEDULED = "NonScheduled" #: Non Scheduled job. SCHEDULED = "Scheduled" #: Scheduled job. +class KekType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Type of encryption key used for key encryption. + """ + + MICROSOFT_MANAGED = "MicrosoftManaged" #: Key encryption key is managed by Microsoft. + CUSTOMER_MANAGED = "CustomerManaged" #: Key encryption key is managed by the Customer. + +class LogCollectionLevel(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Level of the logs to be collected. + """ + + ERROR = "Error" #: Only Errors will be collected in the logs. + VERBOSE = "Verbose" #: Verbose logging (includes Errors, CRC, size information and others). 
+ class NotificationStageName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Name of the stage. """ @@ -93,7 +135,7 @@ class NotificationStageName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)) DISPATCHED = "Dispatched" #: Notification at device dispatched stage. DELIVERED = "Delivered" #: Notification at device delivered stage. PICKED_UP = "PickedUp" #: Notification at device picked up from user stage. - AT_AZURE_DC = "AtAzureDC" #: Notification at device received at azure datacenter stage. + AT_AZURE_DC = "AtAzureDC" #: Notification at device received at Azure datacenter stage. DATA_COPY = "DataCopy" #: Notification at data copy started stage. class OverallValidationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): @@ -128,9 +170,10 @@ class SkuDisabledReason(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): class SkuName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - DATA_BOX = "DataBox" #: Databox. - DATA_BOX_DISK = "DataBoxDisk" #: DataboxDisk. - DATA_BOX_HEAVY = "DataBoxHeavy" #: DataboxHeavy. + DATA_BOX = "DataBox" #: Data Box. + DATA_BOX_DISK = "DataBoxDisk" #: Data Box Disk. + DATA_BOX_HEAVY = "DataBoxHeavy" #: Data Box Heavy. + DATA_BOX_CUSTOMER_DISK = "DataBoxCustomerDisk" #: Data Box Customer Disk. class StageName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Name of the stage which is in progress. @@ -140,18 +183,21 @@ class StageName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): DEVICE_PREPARED = "DevicePrepared" #: A device has been prepared for the order. DISPATCHED = "Dispatched" #: Device has been dispatched to the user of the order. DELIVERED = "Delivered" #: Device has been delivered to the user of the order. - PICKED_UP = "PickedUp" #: Device has been picked up from user and in transit to azure datacenter. - AT_AZURE_DC = "AtAzureDC" #: Device has been received at azure datacenter from the user. - DATA_COPY = "DataCopy" #: Data copy from the device at azure datacenter. + PICKED_UP = "PickedUp" #: Device has been picked up from user and in transit to Azure datacenter. + AT_AZURE_DC = "AtAzureDC" #: Device has been received at Azure datacenter from the user. + DATA_COPY = "DataCopy" #: Data copy from the device at Azure datacenter. COMPLETED = "Completed" #: Order has completed. COMPLETED_WITH_ERRORS = "CompletedWithErrors" #: Order has completed with errors. CANCELLED = "Cancelled" #: Order has been cancelled. FAILED_ISSUE_REPORTED_AT_CUSTOMER = "Failed_IssueReportedAtCustomer" #: Order has failed due to issue reported by user. - FAILED_ISSUE_DETECTED_AT_AZURE_DC = "Failed_IssueDetectedAtAzureDC" #: Order has failed due to issue detected at azure datacenter. + FAILED_ISSUE_DETECTED_AT_AZURE_DC = "Failed_IssueDetectedAtAzureDC" #: Order has failed due to issue detected at Azure datacenter. ABORTED = "Aborted" #: Order has been aborted. COMPLETED_WITH_WARNINGS = "CompletedWithWarnings" #: Order has completed with warnings. READY_TO_DISPATCH_FROM_AZURE_DC = "ReadyToDispatchFromAzureDC" #: Device is ready to be handed to customer from Azure DC. READY_TO_RECEIVE_AT_AZURE_DC = "ReadyToReceiveAtAzureDC" #: Device can be dropped off at Azure DC. + CREATED = "Created" #: Job created by the customer. + SHIPPING = "Shipping" #: User shipping the device to AzureDC. + PACKAGING = "Packaging" #: Packaging the device to return to customer. class StageStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Status of the job stage. 
@@ -164,6 +210,25 @@ class StageStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): CANCELLED = "Cancelled" #: Stage has been cancelled. CANCELLING = "Cancelling" #: Stage is cancelling. SUCCEEDED_WITH_ERRORS = "SucceededWithErrors" #: Stage has succeeded with errors. + WAITING_FOR_CUSTOMER_ACTION = "WaitingForCustomerAction" #: Stage is stuck until customer takes some action. + SUCCEEDED_WITH_WARNINGS = "SucceededWithWarnings" #: Stage has succeeded with warnings. + WAITING_FOR_CUSTOMER_ACTION_FOR_KEK = "WaitingForCustomerActionForKek" #: Stage is waiting for customer action for kek action items. + WAITING_FOR_CUSTOMER_ACTION_FOR_CLEAN_UP = "WaitingForCustomerActionForCleanUp" #: Stage is waiting for customer action for clean up. + CUSTOMER_ACTION_PERFORMED_FOR_CLEAN_UP = "CustomerActionPerformedForCleanUp" #: Stage has performed customer action for clean up. + +class TransferConfigurationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Type of the configuration for transfer. + """ + + TRANSFER_ALL = "TransferAll" #: Transfer all the data. + TRANSFER_USING_FILTER = "TransferUsingFilter" #: Transfer using filter. + +class TransferType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Type of the transfer. + """ + + IMPORT_TO_AZURE = "ImportToAzure" #: Import data to azure. + EXPORT_FROM_AZURE = "ExportFromAzure" #: Export data from azure. class TransportShipmentTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Transport Shipment Type supported for given region. @@ -177,11 +242,11 @@ class ValidationInputDiscriminator(with_metaclass(_CaseInsensitiveEnumMeta, str, """ VALIDATE_ADDRESS = "ValidateAddress" #: Identify request and response of address validation. - VALIDATE_DATA_DESTINATION_DETAILS = "ValidateDataDestinationDetails" #: Identify request and response of data destination details validation. VALIDATE_SUBSCRIPTION_IS_ALLOWED_TO_CREATE_JOB = "ValidateSubscriptionIsAllowedToCreateJob" #: Identify request and response for validation of subscription permission to create job. VALIDATE_PREFERENCES = "ValidatePreferences" #: Identify request and response of preference validation. VALIDATE_CREATE_ORDER_LIMIT = "ValidateCreateOrderLimit" #: Identify request and response of create order limit for subscription validation. VALIDATE_SKU_AVAILABILITY = "ValidateSkuAvailability" #: Identify request and response of active job limit for sku availability. + VALIDATE_DATA_TRANSFER_DETAILS = "ValidateDataTransferDetails" #: Identify request and response of data transfer details validation. class ValidationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Create order limit validation status. diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/models/_models.py b/src/databox/azext_databox/vendored_sdks/databox/models/_models.py similarity index 50% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/models/_models.py rename to src/databox/azext_databox/vendored_sdks/databox/models/_models.py index 0d62bb7ec20..eb9e1ca9f8b 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/models/_models.py +++ b/src/databox/azext_databox/vendored_sdks/databox/models/_models.py @@ -6,6 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +from azure.core.exceptions import HttpResponseError import msrest.serialization @@ -16,26 +17,27 @@ class AccountCredentialDetails(msrest.serialization.Model): :ivar account_name: Name of the account. :vartype account_name: str - :ivar data_destination_type: Data Destination Type. Possible values include: "StorageAccount", + :ivar data_account_type: Type of the account. Possible values include: "StorageAccount", "ManagedDisk". - :vartype data_destination_type: str or ~azure.mgmt.databox.models.DataDestinationType + :vartype data_account_type: str or ~data_box_management_client.models.DataAccountType :ivar account_connection_string: Connection string of the account endpoint to use the account as a storage endpoint on the device. :vartype account_connection_string: str :ivar share_credential_details: Per share level unencrypted access credentials. - :vartype share_credential_details: list[~azure.mgmt.databox.models.ShareCredentialDetails] + :vartype share_credential_details: + list[~data_box_management_client.models.ShareCredentialDetails] """ _validation = { 'account_name': {'readonly': True}, - 'data_destination_type': {'readonly': True}, + 'data_account_type': {'readonly': True}, 'account_connection_string': {'readonly': True}, 'share_credential_details': {'readonly': True}, } _attribute_map = { 'account_name': {'key': 'accountName', 'type': 'str'}, - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, 'account_connection_string': {'key': 'accountConnectionString', 'type': 'str'}, 'share_credential_details': {'key': 'shareCredentialDetails', 'type': '[ShareCredentialDetails]'}, } @@ -46,28 +48,51 @@ def __init__( ): super(AccountCredentialDetails, self).__init__(**kwargs) self.account_name = None - self.data_destination_type = None + self.data_account_type = None self.account_connection_string = None self.share_credential_details = None +class AdditionalErrorInfo(msrest.serialization.Model): + """Additional error info. + + :param type: Additional error type. + :type type: str + :param info: Additional error info. + :type info: object + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'info': {'key': 'info', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AdditionalErrorInfo, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + self.info = kwargs.get('info', None) + + class AddressValidationOutput(msrest.serialization.Model): """Output of the address validation api. Variables are only populated by the server, and will be ignored when sending a request. :param validation_type: Identifies the type of validation response.Constant filled by server. - Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", + "ValidatePreferences", "ValidateCreateOrderLimit", "ValidateSkuAvailability", + "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. 
- :vartype error: ~azure.mgmt.databox.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar validation_status: The address validation status. Possible values include: "Valid", "Invalid", "Ambiguous". - :vartype validation_status: str or ~azure.mgmt.databox.models.AddressValidationStatus + :vartype validation_status: str or ~data_box_management_client.models.AddressValidationStatus :ivar alternate_addresses: List of alternate addresses. - :vartype alternate_addresses: list[~azure.mgmt.databox.models.ShippingAddress] + :vartype alternate_addresses: list[~data_box_management_client.models.ShippingAddress] """ _validation = { @@ -78,7 +103,7 @@ class AddressValidationOutput(msrest.serialization.Model): _attribute_map = { 'validation_type': {'key': 'properties.validationType', 'type': 'str'}, - 'error': {'key': 'properties.error', 'type': 'Error'}, + 'error': {'key': 'properties.error', 'type': 'CloudError'}, 'validation_status': {'key': 'properties.validationStatus', 'type': 'str'}, 'alternate_addresses': {'key': 'properties.alternateAddresses', 'type': '[ShippingAddress]'}, } @@ -98,19 +123,19 @@ class ValidationInputResponse(msrest.serialization.Model): """Minimum properties that should be present in each individual validation response. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AddressValidationProperties, CreateOrderLimitForSubscriptionValidationResponseProperties, DataDestinationDetailsValidationResponseProperties, PreferencesValidationResponseProperties, SkuAvailabilityValidationResponseProperties, SubscriptionIsAllowedToCreateJobValidationResponseProperties. + sub-classes are: AddressValidationProperties, CreateOrderLimitForSubscriptionValidationResponseProperties, DataTransferDetailsValidationResponseProperties, PreferencesValidationResponseProperties, SkuAvailabilityValidationResponseProperties, SubscriptionIsAllowedToCreateJobValidationResponseProperties. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. 
- :vartype error: ~azure.mgmt.databox.models.Error + :vartype error: ~data_box_management_client.models.CloudError """ _validation = { @@ -120,11 +145,11 @@ class ValidationInputResponse(msrest.serialization.Model): _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, } _subtype_map = { - 'validation_type': {'ValidateAddress': 'AddressValidationProperties', 'ValidateCreateOrderLimit': 'CreateOrderLimitForSubscriptionValidationResponseProperties', 'ValidateDataDestinationDetails': 'DataDestinationDetailsValidationResponseProperties', 'ValidatePreferences': 'PreferencesValidationResponseProperties', 'ValidateSkuAvailability': 'SkuAvailabilityValidationResponseProperties', 'ValidateSubscriptionIsAllowedToCreateJob': 'SubscriptionIsAllowedToCreateJobValidationResponseProperties'} + 'validation_type': {'ValidateAddress': 'AddressValidationProperties', 'ValidateCreateOrderLimit': 'CreateOrderLimitForSubscriptionValidationResponseProperties', 'ValidateDataTransferDetails': 'DataTransferDetailsValidationResponseProperties', 'ValidatePreferences': 'PreferencesValidationResponseProperties', 'ValidateSkuAvailability': 'SkuAvailabilityValidationResponseProperties', 'ValidateSubscriptionIsAllowedToCreateJob': 'SubscriptionIsAllowedToCreateJobValidationResponseProperties'} } def __init__( @@ -144,17 +169,17 @@ class AddressValidationProperties(ValidationInputResponse): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar validation_status: The address validation status. Possible values include: "Valid", "Invalid", "Ambiguous". - :vartype validation_status: str or ~azure.mgmt.databox.models.AddressValidationStatus + :vartype validation_status: str or ~data_box_management_client.models.AddressValidationStatus :ivar alternate_addresses: List of alternate addresses. - :vartype alternate_addresses: list[~azure.mgmt.databox.models.ShippingAddress] + :vartype alternate_addresses: list[~data_box_management_client.models.ShippingAddress] """ _validation = { @@ -166,7 +191,7 @@ class AddressValidationProperties(ValidationInputResponse): _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'validation_status': {'key': 'validationStatus', 'type': 'str'}, 'alternate_addresses': {'key': 'alternateAddresses', 'type': '[ShippingAddress]'}, } @@ -181,6 +206,31 @@ def __init__( self.alternate_addresses = None +class ApiError(msrest.serialization.Model): + """ApiError. + + All required parameters must be populated in order to send to Azure. + + :param error: Required. 
+ :type error: ~data_box_management_client.models.ErrorDetail + """ + + _validation = { + 'error': {'required': True}, + } + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorDetail'}, + } + + def __init__( + self, + **kwargs + ): + super(ApiError, self).__init__(**kwargs) + self.error = kwargs['error'] + + class ApplianceNetworkConfiguration(msrest.serialization.Model): """The Network Adapter configuration of a DataBox. @@ -249,12 +299,11 @@ def __init__( class AvailableSkuRequest(msrest.serialization.Model): """The filters for showing the available skus. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - :ivar transfer_type: Required. Type of the transfer. Default value: "ImportToAzure". - :vartype transfer_type: str + :param transfer_type: Required. Type of the transfer. Possible values include: "ImportToAzure", + "ExportFromAzure". + :type transfer_type: str or ~data_box_management_client.models.TransferType :param country: Required. ISO country code. Country for hardware shipment. For codes check: https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements. :type country: str @@ -262,11 +311,11 @@ class AvailableSkuRequest(msrest.serialization.Model): https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. :type location: str :param sku_names: Sku Names to filter for available skus. - :type sku_names: list[str or ~azure.mgmt.databox.models.SkuName] + :type sku_names: list[str or ~data_box_management_client.models.SkuName] """ _validation = { - 'transfer_type': {'required': True, 'constant': True}, + 'transfer_type': {'required': True}, 'country': {'required': True}, 'location': {'required': True}, } @@ -278,13 +327,12 @@ class AvailableSkuRequest(msrest.serialization.Model): 'sku_names': {'key': 'skuNames', 'type': '[str]'}, } - transfer_type = "ImportToAzure" - def __init__( self, **kwargs ): super(AvailableSkuRequest, self).__init__(**kwargs) + self.transfer_type = kwargs['transfer_type'] self.country = kwargs['country'] self.location = kwargs['location'] self.sku_names = kwargs.get('sku_names', None) @@ -296,7 +344,7 @@ class AvailableSkusResult(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. :ivar value: List of available skus. - :vartype value: list[~azure.mgmt.databox.models.SkuInformation] + :vartype value: list[~data_box_management_client.models.SkuInformation] :param next_link: Link for the next set of skus. :type next_link: str """ @@ -319,6 +367,60 @@ def __init__( self.next_link = kwargs.get('next_link', None) +class AzureFileFilterDetails(msrest.serialization.Model): + """Filter details to transfer Azure files. + + :param file_prefix_list: Prefix list of the Azure files to be transferred. + :type file_prefix_list: list[str] + :param file_path_list: List of full path of the files to be transferred. + :type file_path_list: list[str] + :param file_share_list: List of file shares to be transferred. 
+ :type file_share_list: list[str] + """ + + _attribute_map = { + 'file_prefix_list': {'key': 'filePrefixList', 'type': '[str]'}, + 'file_path_list': {'key': 'filePathList', 'type': '[str]'}, + 'file_share_list': {'key': 'fileShareList', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureFileFilterDetails, self).__init__(**kwargs) + self.file_prefix_list = kwargs.get('file_prefix_list', None) + self.file_path_list = kwargs.get('file_path_list', None) + self.file_share_list = kwargs.get('file_share_list', None) + + +class BlobFilterDetails(msrest.serialization.Model): + """Filter details to transfer Azure Blobs. + + :param blob_prefix_list: Prefix list of the Azure blobs to be transferred. + :type blob_prefix_list: list[str] + :param blob_path_list: List of full path of the blobs to be transferred. + :type blob_path_list: list[str] + :param container_list: List of blob containers to be transferred. + :type container_list: list[str] + """ + + _attribute_map = { + 'blob_prefix_list': {'key': 'blobPrefixList', 'type': '[str]'}, + 'blob_path_list': {'key': 'blobPathList', 'type': '[str]'}, + 'container_list': {'key': 'containerList', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobFilterDetails, self).__init__(**kwargs) + self.blob_prefix_list = kwargs.get('blob_prefix_list', None) + self.blob_path_list = kwargs.get('blob_path_list', None) + self.container_list = kwargs.get('container_list', None) + + class CancellationReason(msrest.serialization.Model): """Reason for cancellation. @@ -345,23 +447,25 @@ def __init__( class CloudError(msrest.serialization.Model): - """The error information object. + """Cloud error. Variables are only populated by the server, and will be ignored when sending a request. - :ivar code: Error code string. - :vartype code: str - :ivar message: Descriptive error information. - :vartype message: str - :param target: Error target. + :param code: Cloud error code. + :type code: str + :param message: Cloud error message. + :type message: str + :param target: Cloud error target. :type target: str - :param details: More detailed error information. - :type details: list[~azure.mgmt.databox.models.CloudError] + :ivar details: Cloud error details. + :vartype details: list[~data_box_management_client.models.CloudError] + :ivar additional_info: Cloud error additional info. + :vartype additional_info: list[~data_box_management_client.models.AdditionalErrorInfo] """ _validation = { - 'code': {'readonly': True}, - 'message': {'readonly': True}, + 'details': {'readonly': True}, + 'additional_info': {'readonly': True}, } _attribute_map = { @@ -369,6 +473,7 @@ class CloudError(msrest.serialization.Model): 'message': {'key': 'message', 'type': 'str'}, 'target': {'key': 'target', 'type': 'str'}, 'details': {'key': 'details', 'type': '[CloudError]'}, + 'additional_info': {'key': 'additionalInfo', 'type': '[AdditionalErrorInfo]'}, } def __init__( @@ -376,10 +481,11 @@ def __init__( **kwargs ): super(CloudError, self).__init__(**kwargs) - self.code = None - self.message = None + self.code = kwargs.get('code', None) + self.message = kwargs.get('message', None) self.target = kwargs.get('target', None) - self.details = kwargs.get('details', None) + self.details = None + self.additional_info = None class ContactDetails(msrest.serialization.Model): @@ -398,7 +504,7 @@ class ContactDetails(msrest.serialization.Model): :param email_list: Required. List of Email-ids to be notified about job progress. 
:type email_list: list[str] :param notification_preference: Notification preference for a job stage. - :type notification_preference: list[~azure.mgmt.databox.models.NotificationPreference] + :type notification_preference: list[~data_box_management_client.models.NotificationPreference] """ _validation = { @@ -433,13 +539,14 @@ class CopyLogDetails(msrest.serialization.Model): """Details for log generated during copy. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DataBoxAccountCopyLogDetails, DataBoxDiskCopyLogDetails, DataBoxHeavyAccountCopyLogDetails. + sub-classes are: DataBoxAccountCopyLogDetails, DataBoxCustomerDiskCopyLogDetails, DataBoxDiskCopyLogDetails, DataBoxHeavyAccountCopyLogDetails. All required parameters must be populated in order to send to Azure. :param copy_log_details_type: Required. Indicates the type of job details.Constant filled by - server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type copy_log_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator + server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type copy_log_details_type: str or ~data_box_management_client.models.ClassDiscriminator """ _validation = { @@ -451,7 +558,7 @@ class CopyLogDetails(msrest.serialization.Model): } _subtype_map = { - 'copy_log_details_type': {'DataBox': 'DataBoxAccountCopyLogDetails', 'DataBoxDisk': 'DataBoxDiskCopyLogDetails', 'DataBoxHeavy': 'DataBoxHeavyAccountCopyLogDetails'} + 'copy_log_details_type': {'DataBox': 'DataBoxAccountCopyLogDetails', 'DataBoxCustomerDisk': 'DataBoxCustomerDiskCopyLogDetails', 'DataBoxDisk': 'DataBoxDiskCopyLogDetails', 'DataBoxHeavy': 'DataBoxHeavyAccountCopyLogDetails'} } def __init__( @@ -467,20 +574,24 @@ class CopyProgress(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. - :ivar storage_account_name: Name of the storage account where the data needs to be uploaded. + :ivar storage_account_name: Name of the storage account. This will be empty for data account + types other than storage account. :vartype storage_account_name: str - :ivar data_destination_type: Data Destination Type. Possible values include: "StorageAccount", + :ivar transfer_type: Transfer type of data. Possible values include: "ImportToAzure", + "ExportFromAzure". + :vartype transfer_type: str or ~data_box_management_client.models.TransferType + :ivar data_account_type: Data Account Type. Possible values include: "StorageAccount", "ManagedDisk". - :vartype data_destination_type: str or ~azure.mgmt.databox.models.DataDestinationType + :vartype data_account_type: str or ~data_box_management_client.models.DataAccountType :ivar account_id: Id of the account where the data needs to be uploaded. :vartype account_id: str - :ivar bytes_sent_to_cloud: Amount of data uploaded by the job as of now. - :vartype bytes_sent_to_cloud: long + :ivar bytes_processed: To indicate bytes transferred. + :vartype bytes_processed: long :ivar total_bytes_to_process: Total amount of data to be processed by the job. :vartype total_bytes_to_process: long - :ivar files_processed: Number of files processed by the job as of now. + :ivar files_processed: Number of files processed. :vartype files_processed: long - :ivar total_files_to_process: Total number of files to be processed by the job. + :ivar total_files_to_process: Total files to process. 
:vartype total_files_to_process: long :ivar invalid_files_processed: Number of files not adhering to azure naming conventions which were processed by automatic renaming. @@ -493,13 +604,21 @@ class CopyProgress(msrest.serialization.Model): :vartype renamed_container_count: long :ivar files_errored_out: Number of files which could not be copied. :vartype files_errored_out: long + :ivar directories_errored_out: To indicate directories errored out in the job. + :vartype directories_errored_out: long + :ivar invalid_directories_processed: To indicate directories renamed. + :vartype invalid_directories_processed: long + :ivar is_enumeration_in_progress: To indicate if enumeration of data is in progress. + Until this is true, the TotalBytesToProcess may not be valid. + :vartype is_enumeration_in_progress: bool """ _validation = { 'storage_account_name': {'readonly': True}, - 'data_destination_type': {'readonly': True}, + 'transfer_type': {'readonly': True}, + 'data_account_type': {'readonly': True}, 'account_id': {'readonly': True}, - 'bytes_sent_to_cloud': {'readonly': True}, + 'bytes_processed': {'readonly': True}, 'total_bytes_to_process': {'readonly': True}, 'files_processed': {'readonly': True}, 'total_files_to_process': {'readonly': True}, @@ -507,13 +626,17 @@ class CopyProgress(msrest.serialization.Model): 'invalid_file_bytes_uploaded': {'readonly': True}, 'renamed_container_count': {'readonly': True}, 'files_errored_out': {'readonly': True}, + 'directories_errored_out': {'readonly': True}, + 'invalid_directories_processed': {'readonly': True}, + 'is_enumeration_in_progress': {'readonly': True}, } _attribute_map = { 'storage_account_name': {'key': 'storageAccountName', 'type': 'str'}, - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, + 'transfer_type': {'key': 'transferType', 'type': 'str'}, + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, 'account_id': {'key': 'accountId', 'type': 'str'}, - 'bytes_sent_to_cloud': {'key': 'bytesSentToCloud', 'type': 'long'}, + 'bytes_processed': {'key': 'bytesProcessed', 'type': 'long'}, 'total_bytes_to_process': {'key': 'totalBytesToProcess', 'type': 'long'}, 'files_processed': {'key': 'filesProcessed', 'type': 'long'}, 'total_files_to_process': {'key': 'totalFilesToProcess', 'type': 'long'}, @@ -521,6 +644,9 @@ class CopyProgress(msrest.serialization.Model): 'invalid_file_bytes_uploaded': {'key': 'invalidFileBytesUploaded', 'type': 'long'}, 'renamed_container_count': {'key': 'renamedContainerCount', 'type': 'long'}, 'files_errored_out': {'key': 'filesErroredOut', 'type': 'long'}, + 'directories_errored_out': {'key': 'directoriesErroredOut', 'type': 'long'}, + 'invalid_directories_processed': {'key': 'invalidDirectoriesProcessed', 'type': 'long'}, + 'is_enumeration_in_progress': {'key': 'isEnumerationInProgress', 'type': 'bool'}, } def __init__( @@ -529,9 +655,10 @@ def __init__( ): super(CopyProgress, self).__init__(**kwargs) self.storage_account_name = None - self.data_destination_type = None + self.transfer_type = None + self.data_account_type = None self.account_id = None - self.bytes_sent_to_cloud = None + self.bytes_processed = None self.total_bytes_to_process = None self.files_processed = None self.total_files_to_process = None @@ -539,32 +666,36 @@ def __init__( self.invalid_file_bytes_uploaded = None self.renamed_container_count = None self.files_errored_out = None + self.directories_errored_out = None + self.invalid_directories_processed = None + self.is_enumeration_in_progress = None class 
ValidationRequest(msrest.serialization.Model): - """Input request for all pre job creation validation. + """Minimum request requirement of any validation category. You probably want to use the sub-classes and not this class directly. Known sub-classes are: CreateJobValidations. All required parameters must be populated in order to send to Azure. - :param individual_request_details: Required. List of request details contain validationType and - its request as key and value respectively. - :type individual_request_details: list[~azure.mgmt.databox.models.ValidationInputRequest] :param validation_category: Required. Identify the nature of validation.Constant filled by server. :type validation_category: str + :param individual_request_details: Required. List of request details contain validationType and + its request as key and value respectively. + :type individual_request_details: + list[~data_box_management_client.models.ValidationInputRequest] """ _validation = { - 'individual_request_details': {'required': True}, 'validation_category': {'required': True}, + 'individual_request_details': {'required': True}, } _attribute_map = { - 'individual_request_details': {'key': 'individualRequestDetails', 'type': '[ValidationInputRequest]'}, 'validation_category': {'key': 'validationCategory', 'type': 'str'}, + 'individual_request_details': {'key': 'individualRequestDetails', 'type': '[ValidationInputRequest]'}, } _subtype_map = { @@ -576,8 +707,8 @@ def __init__( **kwargs ): super(ValidationRequest, self).__init__(**kwargs) - self.individual_request_details = kwargs['individual_request_details'] self.validation_category = None # type: Optional[str] + self.individual_request_details = kwargs['individual_request_details'] class CreateJobValidations(ValidationRequest): @@ -585,22 +716,23 @@ class CreateJobValidations(ValidationRequest): All required parameters must be populated in order to send to Azure. - :param individual_request_details: Required. List of request details contain validationType and - its request as key and value respectively. - :type individual_request_details: list[~azure.mgmt.databox.models.ValidationInputRequest] :param validation_category: Required. Identify the nature of validation.Constant filled by server. :type validation_category: str + :param individual_request_details: Required. List of request details contain validationType and + its request as key and value respectively. + :type individual_request_details: + list[~data_box_management_client.models.ValidationInputRequest] """ _validation = { - 'individual_request_details': {'required': True}, 'validation_category': {'required': True}, + 'individual_request_details': {'required': True}, } _attribute_map = { - 'individual_request_details': {'key': 'individualRequestDetails', 'type': '[ValidationInputRequest]'}, 'validation_category': {'key': 'validationCategory', 'type': 'str'}, + 'individual_request_details': {'key': 'individualRequestDetails', 'type': '[ValidationInputRequest]'}, } def __init__( @@ -615,15 +747,15 @@ class ValidationInputRequest(msrest.serialization.Model): """Minimum fields that must be present in any type of validation request. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ValidateAddress, CreateOrderLimitForSubscriptionValidationRequest, DataDestinationDetailsValidationRequest, PreferencesValidationRequest, SkuAvailabilityValidationRequest, SubscriptionIsAllowedToCreateJobValidationRequest. 
+ sub-classes are: ValidateAddress, CreateOrderLimitForSubscriptionValidationRequest, DataTransferDetailsValidationRequest, PreferencesValidationRequest, SkuAvailabilityValidationRequest, SubscriptionIsAllowedToCreateJobValidationRequest. All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator """ _validation = { @@ -635,7 +767,7 @@ class ValidationInputRequest(msrest.serialization.Model): } _subtype_map = { - 'validation_type': {'ValidateAddress': 'ValidateAddress', 'ValidateCreateOrderLimit': 'CreateOrderLimitForSubscriptionValidationRequest', 'ValidateDataDestinationDetails': 'DataDestinationDetailsValidationRequest', 'ValidatePreferences': 'PreferencesValidationRequest', 'ValidateSkuAvailability': 'SkuAvailabilityValidationRequest', 'ValidateSubscriptionIsAllowedToCreateJob': 'SubscriptionIsAllowedToCreateJobValidationRequest'} + 'validation_type': {'ValidateAddress': 'ValidateAddress', 'ValidateCreateOrderLimit': 'CreateOrderLimitForSubscriptionValidationRequest', 'ValidateDataTransferDetails': 'DataTransferDetailsValidationRequest', 'ValidatePreferences': 'PreferencesValidationRequest', 'ValidateSkuAvailability': 'SkuAvailabilityValidationRequest', 'ValidateSubscriptionIsAllowedToCreateJob': 'SubscriptionIsAllowedToCreateJobValidationRequest'} } def __init__( @@ -652,13 +784,13 @@ class CreateOrderLimitForSubscriptionValidationRequest(ValidationInputRequest): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :param device_type: Required. Device type to be used for the job. Possible values include: - "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type device_type: str or ~azure.mgmt.databox.models.SkuName + "DataBox", "DataBoxDisk", "DataBoxHeavy", "DataBoxCustomerDisk". + :type device_type: str or ~data_box_management_client.models.SkuName """ _validation = { @@ -688,15 +820,15 @@ class CreateOrderLimitForSubscriptionValidationResponseProperties(ValidationInpu All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. 
Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar status: Create order limit validation status. Possible values include: "Valid", "Invalid", "Skipped". - :vartype status: str or ~azure.mgmt.databox.models.ValidationStatus + :vartype status: str or ~data_box_management_client.models.ValidationStatus """ _validation = { @@ -707,7 +839,7 @@ class CreateOrderLimitForSubscriptionValidationResponseProperties(ValidationInpu _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'status': {'key': 'status', 'type': 'str'}, } @@ -720,6 +852,141 @@ def __init__( self.status = None +class JobSecrets(msrest.serialization.Model): + """The base class for the secrets. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: DataboxJobSecrets, CustomerDiskJobSecrets, DataBoxDiskJobSecrets, DataBoxHeavyJobSecrets. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant + filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. + :vartype error: ~data_box_management_client.models.CloudError + """ + + _validation = { + 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, + } + + _attribute_map = { + 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, + 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, + } + + _subtype_map = { + 'job_secrets_type': {'DataBox': 'DataboxJobSecrets', 'DataBoxCustomerDisk': 'CustomerDiskJobSecrets', 'DataBoxDisk': 'DataBoxDiskJobSecrets', 'DataBoxHeavy': 'DataBoxHeavyJobSecrets'} + } + + def __init__( + self, + **kwargs + ): + super(JobSecrets, self).__init__(**kwargs) + self.job_secrets_type = None # type: Optional[str] + self.dc_access_security_code = None + self.error = None + + +class CustomerDiskJobSecrets(JobSecrets): + """The secrets related to customer disk job. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant + filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". 
+ :type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. + :vartype error: ~data_box_management_client.models.CloudError + :ivar disk_secrets: Contains the list of secrets object for that device. + :vartype disk_secrets: list[~data_box_management_client.models.DiskSecret] + :ivar carrier_account_number: Carrier Account Number of the customer. + :vartype carrier_account_number: str + """ + + _validation = { + 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, + 'disk_secrets': {'readonly': True}, + 'carrier_account_number': {'readonly': True}, + } + + _attribute_map = { + 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, + 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, + 'disk_secrets': {'key': 'diskSecrets', 'type': '[DiskSecret]'}, + 'carrier_account_number': {'key': 'carrierAccountNumber', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(CustomerDiskJobSecrets, self).__init__(**kwargs) + self.job_secrets_type = 'DataBoxCustomerDisk' # type: str + self.disk_secrets = None + self.carrier_account_number = None + + +class DataAccountDetails(msrest.serialization.Model): + """Account details of the data to be transferred. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ManagedDiskDetails, StorageAccountDetails. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Account Type of the data to be transferred.Constant filled + by server. Possible values include: "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param share_password: Password for all the shares to be created on the device. Should not be + passed for TransferType:ExportFromAzure jobs. If this is not passed, the service will generate + password itself. This will not be returned in Get Call. Password Requirements : Password must + be minimum of 12 and maximum of 64 characters. Password must have at least one uppercase + alphabet, one number and one special character. Password cannot have the following characters : + IilLoO0 Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. + :type share_password: str + """ + + _validation = { + 'data_account_type': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'share_password': {'key': 'sharePassword', 'type': 'str'}, + } + + _subtype_map = { + 'data_account_type': {'ManagedDisk': 'ManagedDiskDetails', 'StorageAccount': 'StorageAccountDetails'} + } + + def __init__( + self, + **kwargs + ): + super(DataAccountDetails, self).__init__(**kwargs) + self.data_account_type = None # type: Optional[str] + self.share_password = kwargs.get('share_password', None) + + class DataBoxAccountCopyLogDetails(CopyLogDetails): """Copy log details for a storage account of a DataBox job. @@ -728,24 +995,30 @@ class DataBoxAccountCopyLogDetails(CopyLogDetails): All required parameters must be populated in order to send to Azure. :param copy_log_details_type: Required. 
Indicates the type of job details.Constant filled by - server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type copy_log_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :ivar account_name: Destination account name. + server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type copy_log_details_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar account_name: Account name. :vartype account_name: str :ivar copy_log_link: Link for copy logs. :vartype copy_log_link: str + :ivar copy_verbose_log_link: Link for copy verbose logs. This will be set only when + LogCollectionLevel is set to Verbose. + :vartype copy_verbose_log_link: str """ _validation = { 'copy_log_details_type': {'required': True}, 'account_name': {'readonly': True}, 'copy_log_link': {'readonly': True}, + 'copy_verbose_log_link': {'readonly': True}, } _attribute_map = { 'copy_log_details_type': {'key': 'copyLogDetailsType', 'type': 'str'}, 'account_name': {'key': 'accountName', 'type': 'str'}, 'copy_log_link': {'key': 'copyLogLink', 'type': 'str'}, + 'copy_verbose_log_link': {'key': 'copyVerboseLogLink', 'type': 'str'}, } def __init__( @@ -756,18 +1029,20 @@ def __init__( self.copy_log_details_type = 'DataBox' # type: str self.account_name = None self.copy_log_link = None + self.copy_verbose_log_link = None -class DataBoxDiskCopyLogDetails(CopyLogDetails): - """Copy Log Details for a disk. +class DataBoxCustomerDiskCopyLogDetails(CopyLogDetails): + """Copy Log Details for customer disk. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :param copy_log_details_type: Required. Indicates the type of job details.Constant filled by - server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type copy_log_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator + server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type copy_log_details_type: str or ~data_box_management_client.models.ClassDiscriminator :ivar disk_serial_number: Disk Serial Number. :vartype disk_serial_number: str :ivar error_log_link: Link for copy error logs. @@ -794,127 +1069,193 @@ def __init__( self, **kwargs ): - super(DataBoxDiskCopyLogDetails, self).__init__(**kwargs) - self.copy_log_details_type = 'DataBoxDisk' # type: str + super(DataBoxCustomerDiskCopyLogDetails, self).__init__(**kwargs) + self.copy_log_details_type = 'DataBoxCustomerDisk' # type: str self.disk_serial_number = None self.error_log_link = None self.verbose_log_link = None -class DataBoxDiskCopyProgress(msrest.serialization.Model): - """DataBox Disk Copy Progress. +class DataBoxCustomerDiskCopyProgress(CopyProgress): + """DataBox CustomerDisk Copy Progress. Variables are only populated by the server, and will be ignored when sending a request. - :ivar serial_number: The serial number of the disk. - :vartype serial_number: str - :ivar bytes_copied: Bytes copied during the copy of disk. - :vartype bytes_copied: long - :ivar percent_complete: Indicates the percentage completed for the copy of the disk. - :vartype percent_complete: int - :ivar status: The Status of the copy. Possible values include: "NotStarted", "InProgress", + :ivar storage_account_name: Name of the storage account. This will be empty for data account + types other than storage account. 
+ :vartype storage_account_name: str + :ivar transfer_type: Transfer type of data. Possible values include: "ImportToAzure", + "ExportFromAzure". + :vartype transfer_type: str or ~data_box_management_client.models.TransferType + :ivar data_account_type: Data Account Type. Possible values include: "StorageAccount", + "ManagedDisk". + :vartype data_account_type: str or ~data_box_management_client.models.DataAccountType + :ivar account_id: Id of the account where the data needs to be uploaded. + :vartype account_id: str + :ivar bytes_processed: To indicate bytes transferred. + :vartype bytes_processed: long + :ivar total_bytes_to_process: Total amount of data to be processed by the job. + :vartype total_bytes_to_process: long + :ivar files_processed: Number of files processed. + :vartype files_processed: long + :ivar total_files_to_process: Total files to process. + :vartype total_files_to_process: long + :ivar invalid_files_processed: Number of files not adhering to azure naming conventions which + were processed by automatic renaming. + :vartype invalid_files_processed: long + :ivar invalid_file_bytes_uploaded: Total amount of data not adhering to azure naming + conventions which were processed by automatic renaming. + :vartype invalid_file_bytes_uploaded: long + :ivar renamed_container_count: Number of folders not adhering to azure naming conventions which + were processed by automatic renaming. + :vartype renamed_container_count: long + :ivar files_errored_out: Number of files which could not be copied. + :vartype files_errored_out: long + :ivar directories_errored_out: To indicate directories errored out in the job. + :vartype directories_errored_out: long + :ivar invalid_directories_processed: To indicate directories renamed. + :vartype invalid_directories_processed: long + :ivar is_enumeration_in_progress: To indicate if enumeration of data is in progress. + Until this is true, the TotalBytesToProcess may not be valid. + :vartype is_enumeration_in_progress: bool + :ivar disk_serial_number: Disk Serial Number. + :vartype disk_serial_number: str + :ivar copy_status: The Status of the copy. Possible values include: "NotStarted", "InProgress", "Completed", "CompletedWithErrors", "Failed", "NotReturned", "HardwareError", "DeviceFormatted", "DeviceMetadataModified", "StorageAccountNotAccessible", "UnsupportedData". 
- :vartype status: str or ~azure.mgmt.databox.models.CopyStatus + :vartype copy_status: str or ~data_box_management_client.models.CopyStatus """ _validation = { - 'serial_number': {'readonly': True}, - 'bytes_copied': {'readonly': True}, - 'percent_complete': {'readonly': True}, - 'status': {'readonly': True}, + 'storage_account_name': {'readonly': True}, + 'transfer_type': {'readonly': True}, + 'data_account_type': {'readonly': True}, + 'account_id': {'readonly': True}, + 'bytes_processed': {'readonly': True}, + 'total_bytes_to_process': {'readonly': True}, + 'files_processed': {'readonly': True}, + 'total_files_to_process': {'readonly': True}, + 'invalid_files_processed': {'readonly': True}, + 'invalid_file_bytes_uploaded': {'readonly': True}, + 'renamed_container_count': {'readonly': True}, + 'files_errored_out': {'readonly': True}, + 'directories_errored_out': {'readonly': True}, + 'invalid_directories_processed': {'readonly': True}, + 'is_enumeration_in_progress': {'readonly': True}, + 'disk_serial_number': {'readonly': True}, + 'copy_status': {'readonly': True}, } _attribute_map = { - 'serial_number': {'key': 'serialNumber', 'type': 'str'}, - 'bytes_copied': {'key': 'bytesCopied', 'type': 'long'}, - 'percent_complete': {'key': 'percentComplete', 'type': 'int'}, - 'status': {'key': 'status', 'type': 'str'}, + 'storage_account_name': {'key': 'storageAccountName', 'type': 'str'}, + 'transfer_type': {'key': 'transferType', 'type': 'str'}, + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'account_id': {'key': 'accountId', 'type': 'str'}, + 'bytes_processed': {'key': 'bytesProcessed', 'type': 'long'}, + 'total_bytes_to_process': {'key': 'totalBytesToProcess', 'type': 'long'}, + 'files_processed': {'key': 'filesProcessed', 'type': 'long'}, + 'total_files_to_process': {'key': 'totalFilesToProcess', 'type': 'long'}, + 'invalid_files_processed': {'key': 'invalidFilesProcessed', 'type': 'long'}, + 'invalid_file_bytes_uploaded': {'key': 'invalidFileBytesUploaded', 'type': 'long'}, + 'renamed_container_count': {'key': 'renamedContainerCount', 'type': 'long'}, + 'files_errored_out': {'key': 'filesErroredOut', 'type': 'long'}, + 'directories_errored_out': {'key': 'directoriesErroredOut', 'type': 'long'}, + 'invalid_directories_processed': {'key': 'invalidDirectoriesProcessed', 'type': 'long'}, + 'is_enumeration_in_progress': {'key': 'isEnumerationInProgress', 'type': 'bool'}, + 'disk_serial_number': {'key': 'diskSerialNumber', 'type': 'str'}, + 'copy_status': {'key': 'copyStatus', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DataBoxDiskCopyProgress, self).__init__(**kwargs) - self.serial_number = None - self.bytes_copied = None - self.percent_complete = None - self.status = None + super(DataBoxCustomerDiskCopyProgress, self).__init__(**kwargs) + self.disk_serial_number = None + self.copy_status = None class JobDetails(msrest.serialization.Model): """Job details. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DataBoxJobDetails, DataBoxDiskJobDetails, DataBoxHeavyJobDetails. + sub-classes are: DataBoxJobDetails, DataBoxCustomerDiskJobDetails, DataBoxDiskJobDetails, DataBoxHeavyJobDetails. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param expected_data_size_in_terabytes: The expected size of the data, which needs to be - transferred in this job, in terabytes. 
- :type expected_data_size_in_terabytes: int :ivar job_stages: List of stages that run in the job. - :vartype job_stages: list[~azure.mgmt.databox.models.JobStages] + :vartype job_stages: list[~data_box_management_client.models.JobStages] :param contact_details: Required. Contact details for notification and shipping. - :type contact_details: ~azure.mgmt.databox.models.ContactDetails - :param shipping_address: Required. Shipping address of the customer. - :type shipping_address: ~azure.mgmt.databox.models.ShippingAddress + :type contact_details: ~data_box_management_client.models.ContactDetails + :param shipping_address: Shipping address of the customer. + :type shipping_address: ~data_box_management_client.models.ShippingAddress :ivar delivery_package: Delivery package shipping details. - :vartype delivery_package: ~azure.mgmt.databox.models.PackageShippingDetails + :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails :ivar return_package: Return package shipping details. - :vartype return_package: ~azure.mgmt.databox.models.PackageShippingDetails - :param destination_account_details: Required. Destination account details. - :type destination_account_details: list[~azure.mgmt.databox.models.DestinationAccountDetails] - :ivar error_details: Error details for failure. This is optional. - :vartype error_details: list[~azure.mgmt.databox.models.JobErrorDetails] + :vartype return_package: ~data_box_management_client.models.PackageShippingDetails + :param data_import_details: Details of the data to be imported into azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] :param job_details_type: Required. Indicates the type of job details.Constant filled by server. - Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator + Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", "DataBoxCustomerDisk". + :type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator :param preferences: Preferences for the order. - :type preferences: ~azure.mgmt.databox.models.Preferences + :type preferences: ~data_box_management_client.models.Preferences :ivar copy_log_details: List of copy log details. - :vartype copy_log_details: list[~azure.mgmt.databox.models.CopyLogDetails] + :vartype copy_log_details: list[~data_box_management_client.models.CopyLogDetails] :ivar reverse_shipment_label_sas_key: Shared access key to download the return shipment label. :vartype reverse_shipment_label_sas_key: str :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. :vartype chain_of_custody_sas_key: str + :param key_encryption_key: Details about which key encryption type is being used. + :type key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_tera_bytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. + :type expected_data_size_in_tera_bytes: int + :ivar actions: Available actions on the job. + :vartype actions: list[str or ~data_box_management_client.models.CustomerResolutionCode] + :ivar last_mitigation_action_on_job: Last mitigation action performed on the job. 
+ :vartype last_mitigation_action_on_job: + ~data_box_management_client.models.LastMitigationActionOnJob """ _validation = { 'job_stages': {'readonly': True}, 'contact_details': {'required': True}, - 'shipping_address': {'required': True}, 'delivery_package': {'readonly': True}, 'return_package': {'readonly': True}, - 'destination_account_details': {'required': True}, - 'error_details': {'readonly': True}, 'job_details_type': {'required': True}, 'copy_log_details': {'readonly': True}, 'reverse_shipment_label_sas_key': {'readonly': True}, 'chain_of_custody_sas_key': {'readonly': True}, + 'actions': {'readonly': True}, + 'last_mitigation_action_on_job': {'readonly': True}, } _attribute_map = { - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, 'preferences': {'key': 'preferences', 'type': 'Preferences'}, 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_tera_bytes': {'key': 'expectedDataSizeInTeraBytes', 'type': 'int'}, + 'actions': {'key': 'actions', 'type': '[str]'}, + 'last_mitigation_action_on_job': {'key': 'lastMitigationActionOnJob', 'type': 'LastMitigationActionOnJob'}, } _subtype_map = { - 'job_details_type': {'DataBox': 'DataBoxJobDetails', 'DataBoxDisk': 'DataBoxDiskJobDetails', 'DataBoxHeavy': 'DataBoxHeavyJobDetails'} + 'job_details_type': {'DataBox': 'DataBoxJobDetails', 'DataBoxCustomerDisk': 'DataBoxCustomerDiskJobDetails', 'DataBoxDisk': 'DataBoxDiskJobDetails', 'DataBoxHeavy': 'DataBoxHeavyJobDetails'} } def __init__( @@ -922,152 +1263,335 @@ def __init__( **kwargs ): super(JobDetails, self).__init__(**kwargs) - self.expected_data_size_in_terabytes = kwargs.get('expected_data_size_in_terabytes', None) self.job_stages = None self.contact_details = kwargs['contact_details'] - self.shipping_address = kwargs['shipping_address'] + self.shipping_address = kwargs.get('shipping_address', None) self.delivery_package = None self.return_package = None - self.destination_account_details = kwargs['destination_account_details'] - self.error_details = None + self.data_import_details = kwargs.get('data_import_details', None) + self.data_export_details = kwargs.get('data_export_details', None) self.job_details_type = None # type: Optional[str] self.preferences = kwargs.get('preferences', None) self.copy_log_details = None self.reverse_shipment_label_sas_key = None self.chain_of_custody_sas_key = None + self.key_encryption_key = kwargs.get('key_encryption_key', None) + 
self.expected_data_size_in_tera_bytes = kwargs.get('expected_data_size_in_tera_bytes', None) + self.actions = None + self.last_mitigation_action_on_job = None -class DataBoxDiskJobDetails(JobDetails): - """DataBox Disk Job Details. +class DataBoxCustomerDiskJobDetails(JobDetails): + """Customer disk job details. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param expected_data_size_in_terabytes: The expected size of the data, which needs to be - transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int :ivar job_stages: List of stages that run in the job. - :vartype job_stages: list[~azure.mgmt.databox.models.JobStages] + :vartype job_stages: list[~data_box_management_client.models.JobStages] :param contact_details: Required. Contact details for notification and shipping. - :type contact_details: ~azure.mgmt.databox.models.ContactDetails - :param shipping_address: Required. Shipping address of the customer. - :type shipping_address: ~azure.mgmt.databox.models.ShippingAddress + :type contact_details: ~data_box_management_client.models.ContactDetails + :param shipping_address: Shipping address of the customer. + :type shipping_address: ~data_box_management_client.models.ShippingAddress :ivar delivery_package: Delivery package shipping details. - :vartype delivery_package: ~azure.mgmt.databox.models.PackageShippingDetails + :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails :ivar return_package: Return package shipping details. - :vartype return_package: ~azure.mgmt.databox.models.PackageShippingDetails - :param destination_account_details: Required. Destination account details. - :type destination_account_details: list[~azure.mgmt.databox.models.DestinationAccountDetails] - :ivar error_details: Error details for failure. This is optional. - :vartype error_details: list[~azure.mgmt.databox.models.JobErrorDetails] + :vartype return_package: ~data_box_management_client.models.PackageShippingDetails + :param data_import_details: Details of the data to be imported into azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] :param job_details_type: Required. Indicates the type of job details.Constant filled by server. - Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator + Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", "DataBoxCustomerDisk". + :type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator :param preferences: Preferences for the order. - :type preferences: ~azure.mgmt.databox.models.Preferences + :type preferences: ~data_box_management_client.models.Preferences :ivar copy_log_details: List of copy log details. - :vartype copy_log_details: list[~azure.mgmt.databox.models.CopyLogDetails] + :vartype copy_log_details: list[~data_box_management_client.models.CopyLogDetails] :ivar reverse_shipment_label_sas_key: Shared access key to download the return shipment label. :vartype reverse_shipment_label_sas_key: str :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. 
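
Reviewer note: with this change the JobDetails base drops destination_account_details/error_details in favour of data_import_details/data_export_details, shipping_address becomes optional, and the jobDetailsType discriminator gains "DataBoxCustomerDisk". A construction sketch under stated assumptions: the vendored models import path is assumed, and contact_details/data_import_details are built elsewhere.

from azext_databox.vendored_sdks.databox import models  # import path is an assumption

def build_disk_job_details(contact_details, data_import_details):
    # contact_details: a models.ContactDetails (still required on every subclass)
    # data_import_details: list of models.DataImportDetails, replacing the old
    # destination_account_details list
    details = models.DataBoxDiskJobDetails(
        contact_details=contact_details,
        data_import_details=data_import_details,
        expected_data_size_in_tera_bytes=1,
    )
    # msrest serializes with the camelCase keys from _attribute_map and
    # constant-fills the jobDetailsType discriminator for the chosen subclass.
    return details.serialize()
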
:vartype chain_of_custody_sas_key: str - :param preferred_disks: User preference on what size disks are needed for the job. The map is - from the disk size in TB to the count. Eg. {2,5} means 5 disks of 2 TB size. Key is string but - will be checked against an int. - :type preferred_disks: dict[str, int] - :ivar copy_progress: Copy progress per disk. - :vartype copy_progress: list[~azure.mgmt.databox.models.DataBoxDiskCopyProgress] - :ivar disks_and_size_details: Contains the map of disk serial number to the disk size being - used for the job. Is returned only after the disks are shipped to the customer. - :vartype disks_and_size_details: dict[str, int] - :param passkey: User entered passkey for DataBox Disk job. - :type passkey: str + :param key_encryption_key: Details about which key encryption type is being used. + :type key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_tera_bytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. + :type expected_data_size_in_tera_bytes: int + :ivar actions: Available actions on the job. + :vartype actions: list[str or ~data_box_management_client.models.CustomerResolutionCode] + :ivar last_mitigation_action_on_job: Last mitigation action performed on the job. + :vartype last_mitigation_action_on_job: + ~data_box_management_client.models.LastMitigationActionOnJob + :param import_disk_details_collection: Contains the map of disk serial number to the disk + details for import jobs. + :type import_disk_details_collection: dict[str, + ~data_box_management_client.models.ImportDiskDetails] + :ivar export_disk_details_collection: Contains the map of disk serial number to the disk + details for export jobs. + :vartype export_disk_details_collection: dict[str, + ~data_box_management_client.models.ExportDiskDetails] + :ivar copy_progress_list: Copy progress per disk. + :vartype copy_progress_list: + list[~data_box_management_client.models.DataBoxCustomerDiskCopyProgress] + :ivar delivery_package_details: Delivery package shipping details. + :vartype delivery_package_details: ~data_box_management_client.models.PackageCarrierInfo + :param return_package_details: Required. Return package shipping details. + :type return_package_details: ~data_box_management_client.models.PackageCarrierDetails + :param xt_passthrough_job_arm_id: ARM id of the XT passthrough job. + :type xt_passthrough_job_arm_id: str + :ivar datacenter_address: Datacenter address for given sku in a region. 
+ :vartype datacenter_address: ~data_box_management_client.models.DatacenterAddressResponse """ _validation = { 'job_stages': {'readonly': True}, 'contact_details': {'required': True}, - 'shipping_address': {'required': True}, 'delivery_package': {'readonly': True}, 'return_package': {'readonly': True}, - 'destination_account_details': {'required': True}, - 'error_details': {'readonly': True}, 'job_details_type': {'required': True}, 'copy_log_details': {'readonly': True}, 'reverse_shipment_label_sas_key': {'readonly': True}, 'chain_of_custody_sas_key': {'readonly': True}, - 'copy_progress': {'readonly': True}, - 'disks_and_size_details': {'readonly': True}, + 'actions': {'readonly': True}, + 'last_mitigation_action_on_job': {'readonly': True}, + 'export_disk_details_collection': {'readonly': True}, + 'copy_progress_list': {'readonly': True}, + 'delivery_package_details': {'readonly': True}, + 'return_package_details': {'required': True}, + 'datacenter_address': {'readonly': True}, } _attribute_map = { - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, 'preferences': {'key': 'preferences', 'type': 'Preferences'}, 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, - 'preferred_disks': {'key': 'preferredDisks', 'type': '{int}'}, - 'copy_progress': {'key': 'copyProgress', 'type': '[DataBoxDiskCopyProgress]'}, - 'disks_and_size_details': {'key': 'disksAndSizeDetails', 'type': '{int}'}, - 'passkey': {'key': 'passkey', 'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_tera_bytes': {'key': 'expectedDataSizeInTeraBytes', 'type': 'int'}, + 'actions': {'key': 'actions', 'type': '[str]'}, + 'last_mitigation_action_on_job': {'key': 'lastMitigationActionOnJob', 'type': 'LastMitigationActionOnJob'}, + 'import_disk_details_collection': {'key': 'importDiskDetailsCollection', 'type': '{ImportDiskDetails}'}, + 'export_disk_details_collection': {'key': 'exportDiskDetailsCollection', 'type': '{ExportDiskDetails}'}, + 'copy_progress_list': {'key': 'copyProgressList', 'type': '[DataBoxCustomerDiskCopyProgress]'}, + 'delivery_package_details': {'key': 'deliveryPackageDetails', 'type': 'PackageCarrierInfo'}, + 'return_package_details': {'key': 'returnPackageDetails', 'type': 'PackageCarrierDetails'}, + 'xt_passthrough_job_arm_id': {'key': 'xtPassthroughJobArmId', 'type': 'str'}, + 'datacenter_address': {'key': 'datacenterAddress', 'type': 'DatacenterAddressResponse'}, } def __init__( self, **kwargs ): - super(DataBoxDiskJobDetails, 
self).__init__(**kwargs) - self.job_details_type = 'DataBoxDisk' # type: str - self.preferred_disks = kwargs.get('preferred_disks', None) - self.copy_progress = None - self.disks_and_size_details = None - self.passkey = kwargs.get('passkey', None) + super(DataBoxCustomerDiskJobDetails, self).__init__(**kwargs) + self.job_details_type = 'DataBoxCustomerDisk' # type: str + self.import_disk_details_collection = kwargs.get('import_disk_details_collection', None) + self.export_disk_details_collection = None + self.copy_progress_list = None + self.delivery_package_details = None + self.return_package_details = kwargs['return_package_details'] + self.xt_passthrough_job_arm_id = kwargs.get('xt_passthrough_job_arm_id', None) + self.datacenter_address = None -class JobSecrets(msrest.serialization.Model): - """The base class for the secrets. +class DataBoxDiskCopyLogDetails(CopyLogDetails): + """Copy Log Details for a disk. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DataboxJobSecrets, DataBoxDiskJobSecrets, DataBoxHeavyJobSecrets. + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant - filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_secrets_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :param dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. - :type dc_access_security_code: ~azure.mgmt.databox.models.DcAccessSecurityCode + :param copy_log_details_type: Required. Indicates the type of job details.Constant filled by + server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type copy_log_details_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar disk_serial_number: Disk Serial Number. + :vartype disk_serial_number: str + :ivar error_log_link: Link for copy error logs. + :vartype error_log_link: str + :ivar verbose_log_link: Link for copy verbose logs. + :vartype verbose_log_link: str """ _validation = { - 'job_secrets_type': {'required': True}, + 'copy_log_details_type': {'required': True}, + 'disk_serial_number': {'readonly': True}, + 'error_log_link': {'readonly': True}, + 'verbose_log_link': {'readonly': True}, } _attribute_map = { - 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, - 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'copy_log_details_type': {'key': 'copyLogDetailsType', 'type': 'str'}, + 'disk_serial_number': {'key': 'diskSerialNumber', 'type': 'str'}, + 'error_log_link': {'key': 'errorLogLink', 'type': 'str'}, + 'verbose_log_link': {'key': 'verboseLogLink', 'type': 'str'}, } - _subtype_map = { - 'job_secrets_type': {'DataBox': 'DataboxJobSecrets', 'DataBoxDisk': 'DataBoxDiskJobSecrets', 'DataBoxHeavy': 'DataBoxHeavyJobSecrets'} + def __init__( + self, + **kwargs + ): + super(DataBoxDiskCopyLogDetails, self).__init__(**kwargs) + self.copy_log_details_type = 'DataBoxDisk' # type: str + self.disk_serial_number = None + self.error_log_link = None + self.verbose_log_link = None + + +class DataBoxDiskCopyProgress(msrest.serialization.Model): + """DataBox Disk Copy Progress. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar serial_number: The serial number of the disk. 
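
Reviewer note: for the new DataBoxCustomerDiskJobDetails, return_package_details is the only extra required field; the export disk collection, copy progress list, delivery package details and datacenter address are all server-populated. A minimal client-side sketch (import path assumed; ImportDiskDetails and PackageCarrierDetails are built elsewhere since their shapes are not part of this hunk):

from azext_databox.vendored_sdks.databox import models  # import path is an assumption

def build_customer_disk_details(contact_details, return_package_details, import_disks):
    # return_package_details: models.PackageCarrierDetails (required by validation)
    # import_disks: dict mapping disk serial number -> models.ImportDiskDetails (optional)
    return models.DataBoxCustomerDiskJobDetails(
        contact_details=contact_details,
        return_package_details=return_package_details,
        import_disk_details_collection=import_disks,
    )
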
+ :vartype serial_number: str + :ivar bytes_copied: Bytes copied during the copy of disk. + :vartype bytes_copied: long + :ivar percent_complete: Indicates the percentage completed for the copy of the disk. + :vartype percent_complete: int + :ivar status: The Status of the copy. Possible values include: "NotStarted", "InProgress", + "Completed", "CompletedWithErrors", "Failed", "NotReturned", "HardwareError", + "DeviceFormatted", "DeviceMetadataModified", "StorageAccountNotAccessible", "UnsupportedData". + :vartype status: str or ~data_box_management_client.models.CopyStatus + """ + + _validation = { + 'serial_number': {'readonly': True}, + 'bytes_copied': {'readonly': True}, + 'percent_complete': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'serial_number': {'key': 'serialNumber', 'type': 'str'}, + 'bytes_copied': {'key': 'bytesCopied', 'type': 'long'}, + 'percent_complete': {'key': 'percentComplete', 'type': 'int'}, + 'status': {'key': 'status', 'type': 'str'}, } def __init__( self, **kwargs ): - super(JobSecrets, self).__init__(**kwargs) - self.job_secrets_type = None # type: Optional[str] - self.dc_access_security_code = kwargs.get('dc_access_security_code', None) + super(DataBoxDiskCopyProgress, self).__init__(**kwargs) + self.serial_number = None + self.bytes_copied = None + self.percent_complete = None + self.status = None + + +class DataBoxDiskJobDetails(JobDetails): + """DataBox Disk Job Details. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar job_stages: List of stages that run in the job. + :vartype job_stages: list[~data_box_management_client.models.JobStages] + :param contact_details: Required. Contact details for notification and shipping. + :type contact_details: ~data_box_management_client.models.ContactDetails + :param shipping_address: Shipping address of the customer. + :type shipping_address: ~data_box_management_client.models.ShippingAddress + :ivar delivery_package: Delivery package shipping details. + :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails + :ivar return_package: Return package shipping details. + :vartype return_package: ~data_box_management_client.models.PackageShippingDetails + :param data_import_details: Details of the data to be imported into azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] + :param job_details_type: Required. Indicates the type of job details.Constant filled by server. + Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", "DataBoxCustomerDisk". + :type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator + :param preferences: Preferences for the order. + :type preferences: ~data_box_management_client.models.Preferences + :ivar copy_log_details: List of copy log details. + :vartype copy_log_details: list[~data_box_management_client.models.CopyLogDetails] + :ivar reverse_shipment_label_sas_key: Shared access key to download the return shipment label. + :vartype reverse_shipment_label_sas_key: str + :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. 
+ :vartype chain_of_custody_sas_key: str + :param key_encryption_key: Details about which key encryption type is being used. + :type key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_tera_bytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. + :type expected_data_size_in_tera_bytes: int + :ivar actions: Available actions on the job. + :vartype actions: list[str or ~data_box_management_client.models.CustomerResolutionCode] + :ivar last_mitigation_action_on_job: Last mitigation action performed on the job. + :vartype last_mitigation_action_on_job: + ~data_box_management_client.models.LastMitigationActionOnJob + :param preferred_disks: User preference on what size disks are needed for the job. The map is + from the disk size in TB to the count. Eg. {2,5} means 5 disks of 2 TB size. Key is string but + will be checked against an int. + :type preferred_disks: dict[str, int] + :ivar copy_progress: Copy progress per disk. + :vartype copy_progress: list[~data_box_management_client.models.DataBoxDiskCopyProgress] + :ivar disks_and_size_details: Contains the map of disk serial number to the disk size being + used for the job. Is returned only after the disks are shipped to the customer. + :vartype disks_and_size_details: dict[str, int] + :param passkey: User entered passkey for DataBox Disk job. + :type passkey: str + """ + + _validation = { + 'job_stages': {'readonly': True}, + 'contact_details': {'required': True}, + 'delivery_package': {'readonly': True}, + 'return_package': {'readonly': True}, + 'job_details_type': {'required': True}, + 'copy_log_details': {'readonly': True}, + 'reverse_shipment_label_sas_key': {'readonly': True}, + 'chain_of_custody_sas_key': {'readonly': True}, + 'actions': {'readonly': True}, + 'last_mitigation_action_on_job': {'readonly': True}, + 'copy_progress': {'readonly': True}, + 'disks_and_size_details': {'readonly': True}, + } + + _attribute_map = { + 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, + 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, + 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, + 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, + 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, + 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, + 'preferences': {'key': 'preferences', 'type': 'Preferences'}, + 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, + 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, + 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_tera_bytes': {'key': 'expectedDataSizeInTeraBytes', 'type': 'int'}, + 'actions': {'key': 'actions', 'type': '[str]'}, + 'last_mitigation_action_on_job': {'key': 'lastMitigationActionOnJob', 'type': 'LastMitigationActionOnJob'}, + 'preferred_disks': {'key': 'preferredDisks', 'type': '{int}'}, + 'copy_progress': {'key': 'copyProgress', 'type': '[DataBoxDiskCopyProgress]'}, + 'disks_and_size_details': {'key': 'disksAndSizeDetails', 'type': '{int}'}, + 'passkey': {'key': 'passkey', 'type': 'str'}, + } + + def __init__( + 
self, + **kwargs + ): + super(DataBoxDiskJobDetails, self).__init__(**kwargs) + self.job_details_type = 'DataBoxDisk' # type: str + self.preferred_disks = kwargs.get('preferred_disks', None) + self.copy_progress = None + self.disks_and_size_details = None + self.passkey = kwargs.get('passkey', None) class DataBoxDiskJobSecrets(JobSecrets): @@ -1078,12 +1602,15 @@ class DataBoxDiskJobSecrets(JobSecrets): All required parameters must be populated in order to send to Azure. :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant - filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_secrets_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :param dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. - :type dc_access_security_code: ~azure.mgmt.databox.models.DcAccessSecurityCode + filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. + :vartype error: ~data_box_management_client.models.CloudError :ivar disk_secrets: Contains the list of secrets object for that device. - :vartype disk_secrets: list[~azure.mgmt.databox.models.DiskSecret] + :vartype disk_secrets: list[~data_box_management_client.models.DiskSecret] :ivar pass_key: PassKey for the disk Job. :vartype pass_key: str :ivar is_passkey_user_defined: Whether passkey was provided by user. @@ -1092,6 +1619,8 @@ class DataBoxDiskJobSecrets(JobSecrets): _validation = { 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, 'disk_secrets': {'readonly': True}, 'pass_key': {'readonly': True}, 'is_passkey_user_defined': {'readonly': True}, @@ -1100,6 +1629,7 @@ class DataBoxDiskJobSecrets(JobSecrets): _attribute_map = { 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'disk_secrets': {'key': 'diskSecrets', 'type': '[DiskSecret]'}, 'pass_key': {'key': 'passKey', 'type': 'str'}, 'is_passkey_user_defined': {'key': 'isPasskeyUserDefined', 'type': 'bool'}, @@ -1124,24 +1654,30 @@ class DataBoxHeavyAccountCopyLogDetails(CopyLogDetails): All required parameters must be populated in order to send to Azure. :param copy_log_details_type: Required. Indicates the type of job details.Constant filled by - server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type copy_log_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :ivar account_name: Destination account name. + server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type copy_log_details_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar account_name: Account name. :vartype account_name: str :ivar copy_log_link: Link for copy logs. :vartype copy_log_link: list[str] + :ivar copy_verbose_log_link: Link for copy verbose logs. This will be set only when the + LogCollectionLevel is set to verbose. 
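
Reviewer note: the preferred_disks docstring above keeps the slightly confusing "{2,5}" wording; the map is keyed by disk size in TB (as a string) with the count as value. A sketch of asking for five 2 TB disks, assuming the same vendored models import as in the earlier sketches:

from azext_databox.vendored_sdks.databox import models  # import path is an assumption

def build_disk_details_with_preference(contact_details):
    return models.DataBoxDiskJobDetails(
        contact_details=contact_details,             # required
        preferred_disks={"2": 5},                    # five disks of 2 TB each
        passkey="user-chosen-passkey",               # optional user-entered passkey
        expected_data_size_in_tera_bytes=8,
    )
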
+ :vartype copy_verbose_log_link: list[str] """ _validation = { 'copy_log_details_type': {'required': True}, 'account_name': {'readonly': True}, 'copy_log_link': {'readonly': True}, + 'copy_verbose_log_link': {'readonly': True}, } _attribute_map = { 'copy_log_details_type': {'key': 'copyLogDetailsType', 'type': 'str'}, 'account_name': {'key': 'accountName', 'type': 'str'}, 'copy_log_link': {'key': 'copyLogLink', 'type': '[str]'}, + 'copy_verbose_log_link': {'key': 'copyVerboseLogLink', 'type': '[str]'}, } def __init__( @@ -1152,6 +1688,7 @@ def __init__( self.copy_log_details_type = 'DataBoxHeavy' # type: str self.account_name = None self.copy_log_link = None + self.copy_verbose_log_link = None class DataBoxHeavyJobDetails(JobDetails): @@ -1161,69 +1698,83 @@ class DataBoxHeavyJobDetails(JobDetails): All required parameters must be populated in order to send to Azure. - :param expected_data_size_in_terabytes: The expected size of the data, which needs to be - transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int :ivar job_stages: List of stages that run in the job. - :vartype job_stages: list[~azure.mgmt.databox.models.JobStages] + :vartype job_stages: list[~data_box_management_client.models.JobStages] :param contact_details: Required. Contact details for notification and shipping. - :type contact_details: ~azure.mgmt.databox.models.ContactDetails - :param shipping_address: Required. Shipping address of the customer. - :type shipping_address: ~azure.mgmt.databox.models.ShippingAddress + :type contact_details: ~data_box_management_client.models.ContactDetails + :param shipping_address: Shipping address of the customer. + :type shipping_address: ~data_box_management_client.models.ShippingAddress :ivar delivery_package: Delivery package shipping details. - :vartype delivery_package: ~azure.mgmt.databox.models.PackageShippingDetails + :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails :ivar return_package: Return package shipping details. - :vartype return_package: ~azure.mgmt.databox.models.PackageShippingDetails - :param destination_account_details: Required. Destination account details. - :type destination_account_details: list[~azure.mgmt.databox.models.DestinationAccountDetails] - :ivar error_details: Error details for failure. This is optional. - :vartype error_details: list[~azure.mgmt.databox.models.JobErrorDetails] + :vartype return_package: ~data_box_management_client.models.PackageShippingDetails + :param data_import_details: Details of the data to be imported into azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] :param job_details_type: Required. Indicates the type of job details.Constant filled by server. - Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator + Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", "DataBoxCustomerDisk". + :type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator :param preferences: Preferences for the order. - :type preferences: ~azure.mgmt.databox.models.Preferences + :type preferences: ~data_box_management_client.models.Preferences :ivar copy_log_details: List of copy log details. 
- :vartype copy_log_details: list[~azure.mgmt.databox.models.CopyLogDetails] + :vartype copy_log_details: list[~data_box_management_client.models.CopyLogDetails] :ivar reverse_shipment_label_sas_key: Shared access key to download the return shipment label. :vartype reverse_shipment_label_sas_key: str :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. :vartype chain_of_custody_sas_key: str + :param key_encryption_key: Details about which key encryption type is being used. + :type key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_tera_bytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. + :type expected_data_size_in_tera_bytes: int + :ivar actions: Available actions on the job. + :vartype actions: list[str or ~data_box_management_client.models.CustomerResolutionCode] + :ivar last_mitigation_action_on_job: Last mitigation action performed on the job. + :vartype last_mitigation_action_on_job: + ~data_box_management_client.models.LastMitigationActionOnJob :ivar copy_progress: Copy progress per account. - :vartype copy_progress: list[~azure.mgmt.databox.models.CopyProgress] - :param device_password: Set Device password for unlocking Databox Heavy. + :vartype copy_progress: list[~data_box_management_client.models.CopyProgress] + :param device_password: Set Device password for unlocking Databox Heavy. Should not be passed + for TransferType:ExportFromAzure jobs. If this is not passed, the service will generate + password itself. This will not be returned in Get Call. Password Requirements : Password must + be minimum of 12 and maximum of 64 characters. Password must have at least one uppercase + alphabet, one number and one special character. Password cannot have the following characters : + IilLoO0 Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. 
:type device_password: str """ _validation = { 'job_stages': {'readonly': True}, 'contact_details': {'required': True}, - 'shipping_address': {'required': True}, 'delivery_package': {'readonly': True}, 'return_package': {'readonly': True}, - 'destination_account_details': {'required': True}, - 'error_details': {'readonly': True}, 'job_details_type': {'required': True}, 'copy_log_details': {'readonly': True}, 'reverse_shipment_label_sas_key': {'readonly': True}, 'chain_of_custody_sas_key': {'readonly': True}, + 'actions': {'readonly': True}, + 'last_mitigation_action_on_job': {'readonly': True}, 'copy_progress': {'readonly': True}, } _attribute_map = { - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, 'preferences': {'key': 'preferences', 'type': 'Preferences'}, 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_tera_bytes': {'key': 'expectedDataSizeInTeraBytes', 'type': 'int'}, + 'actions': {'key': 'actions', 'type': '[str]'}, + 'last_mitigation_action_on_job': {'key': 'lastMitigationActionOnJob', 'type': 'LastMitigationActionOnJob'}, 'copy_progress': {'key': 'copyProgress', 'type': '[CopyProgress]'}, 'device_password': {'key': 'devicePassword', 'type': 'str'}, } @@ -1246,22 +1797,28 @@ class DataBoxHeavyJobSecrets(JobSecrets): All required parameters must be populated in order to send to Azure. :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant - filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_secrets_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :param dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. - :type dc_access_security_code: ~azure.mgmt.databox.models.DcAccessSecurityCode + filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. + :vartype error: ~data_box_management_client.models.CloudError :ivar cabinet_pod_secrets: Contains the list of secret objects for a databox heavy job. 
- :vartype cabinet_pod_secrets: list[~azure.mgmt.databox.models.DataBoxHeavySecret] + :vartype cabinet_pod_secrets: list[~data_box_management_client.models.DataBoxHeavySecret] """ _validation = { 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, 'cabinet_pod_secrets': {'readonly': True}, } _attribute_map = { 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'cabinet_pod_secrets': {'key': 'cabinetPodSecrets', 'type': '[DataBoxHeavySecret]'}, } @@ -1284,12 +1841,14 @@ class DataBoxHeavySecret(msrest.serialization.Model): :ivar device_password: Password for out of the box experience on device. :vartype device_password: str :ivar network_configurations: Network configuration of the appliance. - :vartype network_configurations: list[~azure.mgmt.databox.models.ApplianceNetworkConfiguration] + :vartype network_configurations: + list[~data_box_management_client.models.ApplianceNetworkConfiguration] :ivar encoded_validation_cert_pub_key: The base 64 encoded public key to authenticate with the device. :vartype encoded_validation_cert_pub_key: str :ivar account_credential_details: Per account level access credentials. - :vartype account_credential_details: list[~azure.mgmt.databox.models.AccountCredentialDetails] + :vartype account_credential_details: + list[~data_box_management_client.models.AccountCredentialDetails] """ _validation = { @@ -1327,69 +1886,83 @@ class DataBoxJobDetails(JobDetails): All required parameters must be populated in order to send to Azure. - :param expected_data_size_in_terabytes: The expected size of the data, which needs to be - transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int :ivar job_stages: List of stages that run in the job. - :vartype job_stages: list[~azure.mgmt.databox.models.JobStages] + :vartype job_stages: list[~data_box_management_client.models.JobStages] :param contact_details: Required. Contact details for notification and shipping. - :type contact_details: ~azure.mgmt.databox.models.ContactDetails - :param shipping_address: Required. Shipping address of the customer. - :type shipping_address: ~azure.mgmt.databox.models.ShippingAddress + :type contact_details: ~data_box_management_client.models.ContactDetails + :param shipping_address: Shipping address of the customer. + :type shipping_address: ~data_box_management_client.models.ShippingAddress :ivar delivery_package: Delivery package shipping details. - :vartype delivery_package: ~azure.mgmt.databox.models.PackageShippingDetails + :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails :ivar return_package: Return package shipping details. - :vartype return_package: ~azure.mgmt.databox.models.PackageShippingDetails - :param destination_account_details: Required. Destination account details. - :type destination_account_details: list[~azure.mgmt.databox.models.DestinationAccountDetails] - :ivar error_details: Error details for failure. This is optional. - :vartype error_details: list[~azure.mgmt.databox.models.JobErrorDetails] + :vartype return_package: ~data_box_management_client.models.PackageShippingDetails + :param data_import_details: Details of the data to be imported into azure. 
+ :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] :param job_details_type: Required. Indicates the type of job details.Constant filled by server. - Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator + Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", "DataBoxCustomerDisk". + :type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator :param preferences: Preferences for the order. - :type preferences: ~azure.mgmt.databox.models.Preferences + :type preferences: ~data_box_management_client.models.Preferences :ivar copy_log_details: List of copy log details. - :vartype copy_log_details: list[~azure.mgmt.databox.models.CopyLogDetails] + :vartype copy_log_details: list[~data_box_management_client.models.CopyLogDetails] :ivar reverse_shipment_label_sas_key: Shared access key to download the return shipment label. :vartype reverse_shipment_label_sas_key: str :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. :vartype chain_of_custody_sas_key: str + :param key_encryption_key: Details about which key encryption type is being used. + :type key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_tera_bytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. + :type expected_data_size_in_tera_bytes: int + :ivar actions: Available actions on the job. + :vartype actions: list[str or ~data_box_management_client.models.CustomerResolutionCode] + :ivar last_mitigation_action_on_job: Last mitigation action performed on the job. + :vartype last_mitigation_action_on_job: + ~data_box_management_client.models.LastMitigationActionOnJob :ivar copy_progress: Copy progress per storage account. - :vartype copy_progress: list[~azure.mgmt.databox.models.CopyProgress] - :param device_password: Set Device password for unlocking Databox. + :vartype copy_progress: list[~data_box_management_client.models.CopyProgress] + :param device_password: Set Device password for unlocking Databox. Should not be passed for + TransferType:ExportFromAzure jobs. If this is not passed, the service will generate password + itself. This will not be returned in Get Call. Password Requirements : Password must be + minimum of 12 and maximum of 64 characters. Password must have at least one uppercase alphabet, + one number and one special character. Password cannot have the following characters : IilLoO0 + Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. 
:type device_password: str """ _validation = { 'job_stages': {'readonly': True}, 'contact_details': {'required': True}, - 'shipping_address': {'required': True}, 'delivery_package': {'readonly': True}, 'return_package': {'readonly': True}, - 'destination_account_details': {'required': True}, - 'error_details': {'readonly': True}, 'job_details_type': {'required': True}, 'copy_log_details': {'readonly': True}, 'reverse_shipment_label_sas_key': {'readonly': True}, 'chain_of_custody_sas_key': {'readonly': True}, + 'actions': {'readonly': True}, + 'last_mitigation_action_on_job': {'readonly': True}, 'copy_progress': {'readonly': True}, } _attribute_map = { - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, 'preferences': {'key': 'preferences', 'type': 'Preferences'}, 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_tera_bytes': {'key': 'expectedDataSizeInTeraBytes', 'type': 'int'}, + 'actions': {'key': 'actions', 'type': '[str]'}, + 'last_mitigation_action_on_job': {'key': 'lastMitigationActionOnJob', 'type': 'LastMitigationActionOnJob'}, 'copy_progress': {'key': 'copyProgress', 'type': '[CopyProgress]'}, 'device_password': {'key': 'devicePassword', 'type': 'str'}, } @@ -1407,24 +1980,32 @@ def __init__( class DataboxJobSecrets(JobSecrets): """The secrets related to a databox job. + Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant - filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_secrets_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :param dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. - :type dc_access_security_code: ~azure.mgmt.databox.models.DcAccessSecurityCode + filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. 
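
Reviewer note: the device_password docstrings on DataBoxJobDetails and DataBoxHeavyJobDetails above now spell out the password rules (12 to 64 characters, at least one uppercase letter, one digit and one special character, none of IilLoO0, and only letters, digits and the quoted special set). An illustrative local check of those documented rules; the service remains authoritative and the exact special-character set is taken verbatim from the docstrings:

ALLOWED_SPECIALS = "@#-$%^!+=;:_()]+"   # set quoted in the docstrings above
FORBIDDEN = set("IilLoO0")

def is_valid_device_password(pw):
    if not 12 <= len(pw) <= 64:
        return False
    if any(ch in FORBIDDEN for ch in pw):
        return False
    if not any(ch.isupper() for ch in pw):
        return False
    if not any(ch.isdigit() for ch in pw):
        return False
    if not any(ch in ALLOWED_SPECIALS for ch in pw):
        return False
    # every character must be a letter, a digit, or one of the allowed specials
    return all(ch.isalpha() or ch.isdigit() or ch in ALLOWED_SPECIALS for ch in pw)
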
+ :vartype error: ~data_box_management_client.models.CloudError :param pod_secrets: Contains the list of secret objects for a job. - :type pod_secrets: list[~azure.mgmt.databox.models.DataBoxSecret] + :type pod_secrets: list[~data_box_management_client.models.DataBoxSecret] """ _validation = { 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, } _attribute_map = { 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'pod_secrets': {'key': 'podSecrets', 'type': '[DataBoxSecret]'}, } @@ -1445,13 +2026,15 @@ class ScheduleAvailabilityRequest(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param storage_location: Required. Location for data transfer. - For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api- - version=2018-01-01. + :param storage_location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. :type storage_location: str :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by - server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type sku_name: str or ~azure.mgmt.databox.models.SkuName + server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type sku_name: str or ~data_box_management_client.models.SkuName + :param country: Country in which storage location should be supported. + :type country: str """ _validation = { @@ -1462,6 +2045,7 @@ class ScheduleAvailabilityRequest(msrest.serialization.Model): _attribute_map = { 'storage_location': {'key': 'storageLocation', 'type': 'str'}, 'sku_name': {'key': 'skuName', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, } _subtype_map = { @@ -1475,6 +2059,7 @@ def __init__( super(ScheduleAvailabilityRequest, self).__init__(**kwargs) self.storage_location = kwargs['storage_location'] self.sku_name = None # type: Optional[str] + self.country = kwargs.get('country', None) class DataBoxScheduleAvailabilityRequest(ScheduleAvailabilityRequest): @@ -1482,13 +2067,15 @@ class DataBoxScheduleAvailabilityRequest(ScheduleAvailabilityRequest): All required parameters must be populated in order to send to Azure. - :param storage_location: Required. Location for data transfer. - For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api- - version=2018-01-01. + :param storage_location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. :type storage_location: str :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by - server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type sku_name: str or ~azure.mgmt.databox.models.SkuName + server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type sku_name: str or ~data_box_management_client.models.SkuName + :param country: Country in which storage location should be supported. 
+ :type country: str """ _validation = { @@ -1499,6 +2086,7 @@ class DataBoxScheduleAvailabilityRequest(ScheduleAvailabilityRequest): _attribute_map = { 'storage_location': {'key': 'storageLocation', 'type': 'str'}, 'sku_name': {'key': 'skuName', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, } def __init__( @@ -1519,12 +2107,14 @@ class DataBoxSecret(msrest.serialization.Model): :ivar device_password: Password for out of the box experience on device. :vartype device_password: str :ivar network_configurations: Network configuration of the appliance. - :vartype network_configurations: list[~azure.mgmt.databox.models.ApplianceNetworkConfiguration] + :vartype network_configurations: + list[~data_box_management_client.models.ApplianceNetworkConfiguration] :ivar encoded_validation_cert_pub_key: The base 64 encoded public key to authenticate with the device. :vartype encoded_validation_cert_pub_key: str :ivar account_credential_details: Per account level access credentials. - :vartype account_credential_details: list[~azure.mgmt.databox.models.AccountCredentialDetails] + :vartype account_credential_details: + list[~data_box_management_client.models.AccountCredentialDetails] """ _validation = { @@ -1555,247 +2145,290 @@ def __init__( self.account_credential_details = None -class DataDestinationDetailsValidationRequest(ValidationInputRequest): - """Request to validate data destination details. +class DatacenterAddressResponse(msrest.serialization.Model): + """Datacenter address for given storage location. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: DatacenterAddressInstructionResponse, DatacenterAddressLocationResponse. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator - :param destination_account_details: Required. Destination account details list. - :type destination_account_details: list[~azure.mgmt.databox.models.DestinationAccountDetails] - :param location: Required. Location of stamp or geo. - :type location: str + :param datacenter_address_type: Required. Data center address type.Constant filled by server. + Possible values include: "DatacenterAddressLocation", "DatacenterAddressInstruction". + :type datacenter_address_type: str or ~data_box_management_client.models.DatacenterAddressType + :ivar supported_carriers_for_return_shipment: List of supported carriers for return shipment. 
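
Reviewer note: the ScheduleAvailabilityRequest changes above add an optional country next to the required storage_location, while skuName stays a constant-filled discriminator. A request sketch (import path assumed as before):

from azext_databox.vendored_sdks.databox import models  # import path is an assumption

request = models.DataBoxScheduleAvailabilityRequest(
    storage_location="westus",   # required
    country="US",                # new optional field in this api-version
)
# sku_name is the constant-filled discriminator, so it is not passed here.
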
+ :vartype supported_carriers_for_return_shipment: list[str] """ _validation = { - 'validation_type': {'required': True}, - 'destination_account_details': {'required': True}, - 'location': {'required': True}, + 'datacenter_address_type': {'required': True}, + 'supported_carriers_for_return_shipment': {'readonly': True}, } _attribute_map = { - 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'location': {'key': 'location', 'type': 'str'}, + 'datacenter_address_type': {'key': 'datacenterAddressType', 'type': 'str'}, + 'supported_carriers_for_return_shipment': {'key': 'supportedCarriersForReturnShipment', 'type': '[str]'}, + } + + _subtype_map = { + 'datacenter_address_type': {'DatacenterAddressInstruction': 'DatacenterAddressInstructionResponse', 'DatacenterAddressLocation': 'DatacenterAddressLocationResponse'} } def __init__( self, **kwargs ): - super(DataDestinationDetailsValidationRequest, self).__init__(**kwargs) - self.validation_type = 'ValidateDataDestinationDetails' # type: str - self.destination_account_details = kwargs['destination_account_details'] - self.location = kwargs['location'] + super(DatacenterAddressResponse, self).__init__(**kwargs) + self.datacenter_address_type = None # type: Optional[str] + self.supported_carriers_for_return_shipment = None -class DataDestinationDetailsValidationResponseProperties(ValidationInputResponse): - """Properties of data destination details validation response. +class DatacenterAddressInstructionResponse(DatacenterAddressResponse): + """Datacenter address for given storage location. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator - :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error - :ivar status: Data destination details validation status. Possible values include: "Valid", - "Invalid", "Skipped". - :vartype status: str or ~azure.mgmt.databox.models.ValidationStatus + :param datacenter_address_type: Required. Data center address type.Constant filled by server. + Possible values include: "DatacenterAddressLocation", "DatacenterAddressInstruction". + :type datacenter_address_type: str or ~data_box_management_client.models.DatacenterAddressType + :ivar supported_carriers_for_return_shipment: List of supported carriers for return shipment. + :vartype supported_carriers_for_return_shipment: list[str] + :ivar communication_instruction: Data center communication instruction. 
+ :vartype communication_instruction: str """ _validation = { - 'validation_type': {'required': True}, - 'error': {'readonly': True}, - 'status': {'readonly': True}, + 'datacenter_address_type': {'required': True}, + 'supported_carriers_for_return_shipment': {'readonly': True}, + 'communication_instruction': {'readonly': True}, } _attribute_map = { - 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, - 'status': {'key': 'status', 'type': 'str'}, + 'datacenter_address_type': {'key': 'datacenterAddressType', 'type': 'str'}, + 'supported_carriers_for_return_shipment': {'key': 'supportedCarriersForReturnShipment', 'type': '[str]'}, + 'communication_instruction': {'key': 'communicationInstruction', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DataDestinationDetailsValidationResponseProperties, self).__init__(**kwargs) - self.validation_type = 'ValidateDataDestinationDetails' # type: str - self.status = None + super(DatacenterAddressInstructionResponse, self).__init__(**kwargs) + self.datacenter_address_type = 'DatacenterAddressInstruction' # type: str + self.communication_instruction = None -class DcAccessSecurityCode(msrest.serialization.Model): - """Dc Access Security code for device. +class DatacenterAddressLocationResponse(DatacenterAddressResponse): + """Datacenter address for given storage location. - :param forward_dc_access_code: Dc Access Code for dispatching from DC. - :type forward_dc_access_code: str - :param reverse_dc_access_code: Dc Access code for dropping off at DC. - :type reverse_dc_access_code: str + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param datacenter_address_type: Required. Data center address type.Constant filled by server. + Possible values include: "DatacenterAddressLocation", "DatacenterAddressInstruction". + :type datacenter_address_type: str or ~data_box_management_client.models.DatacenterAddressType + :ivar supported_carriers_for_return_shipment: List of supported carriers for return shipment. + :vartype supported_carriers_for_return_shipment: list[str] + :ivar contact_person_name: Contact person name. + :vartype contact_person_name: str + :ivar company: Company name. + :vartype company: str + :ivar street1: Street address line 1. + :vartype street1: str + :ivar street2: Street address line 2. + :vartype street2: str + :ivar street3: Street address line 3. + :vartype street3: str + :ivar city: City name. + :vartype city: str + :ivar state: name of the state. + :vartype state: str + :ivar zip: Zip code. + :vartype zip: str + :ivar country: name of the country. + :vartype country: str + :ivar phone: Phone number. + :vartype phone: str + :ivar phone_extension: Phone extension. + :vartype phone_extension: str + :ivar address_type: Address type. + :vartype address_type: str + :ivar additional_shipping_information: Special instruction for shipping. 
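
Reviewer note: datacenter address responses are polymorphic on datacenterAddressType, with an instruction-only variant and a full location variant as shown above. A sketch of handling a deserialized response, assuming `address` was returned by the service and the vendored models import path as before:

from azext_databox.vendored_sdks.databox import models  # import path is an assumption

def describe_datacenter_address(address):
    # address: a DatacenterAddressResponse subclass; all fields are read-only.
    if isinstance(address, models.DatacenterAddressInstructionResponse):
        # Some locations only return a communication instruction.
        return address.communication_instruction
    if isinstance(address, models.DatacenterAddressLocationResponse):
        parts = [address.contact_person_name, address.company, address.street1,
                 address.city, address.state, address.zip, address.country]
        return ", ".join(p for p in parts if p)
    return None
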
+ :vartype additional_shipping_information: str """ + _validation = { + 'datacenter_address_type': {'required': True}, + 'supported_carriers_for_return_shipment': {'readonly': True}, + 'contact_person_name': {'readonly': True}, + 'company': {'readonly': True}, + 'street1': {'readonly': True}, + 'street2': {'readonly': True}, + 'street3': {'readonly': True}, + 'city': {'readonly': True}, + 'state': {'readonly': True}, + 'zip': {'readonly': True}, + 'country': {'readonly': True}, + 'phone': {'readonly': True}, + 'phone_extension': {'readonly': True}, + 'address_type': {'readonly': True}, + 'additional_shipping_information': {'readonly': True}, + } + _attribute_map = { - 'forward_dc_access_code': {'key': 'forwardDcAccessCode', 'type': 'str'}, - 'reverse_dc_access_code': {'key': 'reverseDcAccessCode', 'type': 'str'}, + 'datacenter_address_type': {'key': 'datacenterAddressType', 'type': 'str'}, + 'supported_carriers_for_return_shipment': {'key': 'supportedCarriersForReturnShipment', 'type': '[str]'}, + 'contact_person_name': {'key': 'contactPersonName', 'type': 'str'}, + 'company': {'key': 'company', 'type': 'str'}, + 'street1': {'key': 'street1', 'type': 'str'}, + 'street2': {'key': 'street2', 'type': 'str'}, + 'street3': {'key': 'street3', 'type': 'str'}, + 'city': {'key': 'city', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'zip': {'key': 'zip', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, + 'phone': {'key': 'phone', 'type': 'str'}, + 'phone_extension': {'key': 'phoneExtension', 'type': 'str'}, + 'address_type': {'key': 'addressType', 'type': 'str'}, + 'additional_shipping_information': {'key': 'additionalShippingInformation', 'type': 'str'}, } def __init__( self, **kwargs ): - super(DcAccessSecurityCode, self).__init__(**kwargs) - self.forward_dc_access_code = kwargs.get('forward_dc_access_code', None) - self.reverse_dc_access_code = kwargs.get('reverse_dc_access_code', None) - - -class DestinationAccountDetails(msrest.serialization.Model): - """Details of the destination storage accounts. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DestinationManagedDiskDetails, DestinationStorageAccountDetails. + super(DatacenterAddressLocationResponse, self).__init__(**kwargs) + self.datacenter_address_type = 'DatacenterAddressLocation' # type: str + self.contact_person_name = None + self.company = None + self.street1 = None + self.street2 = None + self.street3 = None + self.city = None + self.state = None + self.zip = None + self.country = None + self.phone = None + self.phone_extension = None + self.address_type = None + self.additional_shipping_information = None + + +class DatacenterAddressRequest(msrest.serialization.Model): + """Request body to get the datacenter address. All required parameters must be populated in order to send to Azure. - :param data_destination_type: Required. Data Destination Type.Constant filled by server. - Possible values include: "StorageAccount", "ManagedDisk". - :type data_destination_type: str or ~azure.mgmt.databox.models.DataDestinationType - :param account_id: Arm Id of the destination where the data has to be moved. - :type account_id: str - :param share_password: Share password to be shared by all shares in SA. - :type share_password: str + :param storage_location: Required. Storage location. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. + :type storage_location: str + :param sku_name: Required. 
Sku Name for which the data center address is requested. Possible
+     values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", "DataBoxCustomerDisk".
+    :type sku_name: str or ~data_box_management_client.models.SkuName
     """

     _validation = {
-        'data_destination_type': {'required': True},
+        'storage_location': {'required': True},
+        'sku_name': {'required': True},
     }

     _attribute_map = {
-        'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'},
-        'account_id': {'key': 'accountId', 'type': 'str'},
-        'share_password': {'key': 'sharePassword', 'type': 'str'},
-    }
-
-    _subtype_map = {
-        'data_destination_type': {'ManagedDisk': 'DestinationManagedDiskDetails', 'StorageAccount': 'DestinationStorageAccountDetails'}
+        'storage_location': {'key': 'storageLocation', 'type': 'str'},
+        'sku_name': {'key': 'skuName', 'type': 'str'},
     }

     def __init__(
         self,
         **kwargs
     ):
-        super(DestinationAccountDetails, self).__init__(**kwargs)
-        self.data_destination_type = None  # type: Optional[str]
-        self.account_id = kwargs.get('account_id', None)
-        self.share_password = kwargs.get('share_password', None)
+        super(DatacenterAddressRequest, self).__init__(**kwargs)
+        self.storage_location = kwargs['storage_location']
+        self.sku_name = kwargs['sku_name']


-class DestinationManagedDiskDetails(DestinationAccountDetails):
-    """Details for the destination compute disks.
+class DataExportDetails(msrest.serialization.Model):
+    """Details of the data to be used for exporting data from Azure.

     All required parameters must be populated in order to send to Azure.

-    :param data_destination_type: Required. Data Destination Type.Constant filled by server.
-     Possible values include: "StorageAccount", "ManagedDisk".
-    :type data_destination_type: str or ~azure.mgmt.databox.models.DataDestinationType
-    :param account_id: Arm Id of the destination where the data has to be moved.
-    :type account_id: str
-    :param share_password: Share password to be shared by all shares in SA.
-    :type share_password: str
-    :param resource_group_id: Required. Destination Resource Group Id where the Compute disks
-     should be created.
-    :type resource_group_id: str
-    :param staging_storage_account_id: Required. Arm Id of the storage account that can be used to
-     copy the vhd for staging.
-    :type staging_storage_account_id: str
+    :param transfer_configuration: Required. Configuration for the data transfer.
+    :type transfer_configuration: ~data_box_management_client.models.TransferConfiguration
+    :param log_collection_level: Level of the logs to be collected. Possible values include:
+     "Error", "Verbose". Default value: "Error".
+    :type log_collection_level: str or ~data_box_management_client.models.LogCollectionLevel
+    :param account_details: Required. Account details of the data to be transferred.
+ :type account_details: ~data_box_management_client.models.DataAccountDetails """ _validation = { - 'data_destination_type': {'required': True}, - 'resource_group_id': {'required': True}, - 'staging_storage_account_id': {'required': True}, + 'transfer_configuration': {'required': True}, + 'account_details': {'required': True}, } _attribute_map = { - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, - 'account_id': {'key': 'accountId', 'type': 'str'}, - 'share_password': {'key': 'sharePassword', 'type': 'str'}, - 'resource_group_id': {'key': 'resourceGroupId', 'type': 'str'}, - 'staging_storage_account_id': {'key': 'stagingStorageAccountId', 'type': 'str'}, + 'transfer_configuration': {'key': 'transferConfiguration', 'type': 'TransferConfiguration'}, + 'log_collection_level': {'key': 'logCollectionLevel', 'type': 'str'}, + 'account_details': {'key': 'accountDetails', 'type': 'DataAccountDetails'}, } def __init__( self, **kwargs ): - super(DestinationManagedDiskDetails, self).__init__(**kwargs) - self.data_destination_type = 'ManagedDisk' # type: str - self.resource_group_id = kwargs['resource_group_id'] - self.staging_storage_account_id = kwargs['staging_storage_account_id'] + super(DataExportDetails, self).__init__(**kwargs) + self.transfer_configuration = kwargs['transfer_configuration'] + self.log_collection_level = kwargs.get('log_collection_level', "Error") + self.account_details = kwargs['account_details'] -class DestinationStorageAccountDetails(DestinationAccountDetails): - """Details for the destination storage account. +class DataImportDetails(msrest.serialization.Model): + """Details of the data to be used for importing data to azure. All required parameters must be populated in order to send to Azure. - :param data_destination_type: Required. Data Destination Type.Constant filled by server. - Possible values include: "StorageAccount", "ManagedDisk". - :type data_destination_type: str or ~azure.mgmt.databox.models.DataDestinationType - :param account_id: Arm Id of the destination where the data has to be moved. - :type account_id: str - :param share_password: Share password to be shared by all shares in SA. - :type share_password: str - :param storage_account_id: Required. Destination Storage Account Arm Id. - :type storage_account_id: str + :param account_details: Required. Account details of the data to be transferred. + :type account_details: ~data_box_management_client.models.DataAccountDetails """ _validation = { - 'data_destination_type': {'required': True}, - 'storage_account_id': {'required': True}, + 'account_details': {'required': True}, } _attribute_map = { - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, - 'account_id': {'key': 'accountId', 'type': 'str'}, - 'share_password': {'key': 'sharePassword', 'type': 'str'}, - 'storage_account_id': {'key': 'storageAccountId', 'type': 'str'}, + 'account_details': {'key': 'accountDetails', 'type': 'DataAccountDetails'}, } def __init__( self, **kwargs ): - super(DestinationStorageAccountDetails, self).__init__(**kwargs) - self.data_destination_type = 'StorageAccount' # type: str - self.storage_account_id = kwargs['storage_account_id'] + super(DataImportDetails, self).__init__(**kwargs) + self.account_details = kwargs['account_details'] -class DestinationToServiceLocationMap(msrest.serialization.Model): - """Map of destination location to service location. +class DataLocationToServiceLocationMap(msrest.serialization.Model): + """Map of data location to service location. 
Variables are only populated by the server, and will be ignored when sending a request. - :ivar destination_location: Location of the destination. - :vartype destination_location: str + :ivar data_location: Location of the data. + :vartype data_location: str :ivar service_location: Location of the service. :vartype service_location: str """ _validation = { - 'destination_location': {'readonly': True}, + 'data_location': {'readonly': True}, 'service_location': {'readonly': True}, } _attribute_map = { - 'destination_location': {'key': 'destinationLocation', 'type': 'str'}, + 'data_location': {'key': 'dataLocation', 'type': 'str'}, 'service_location': {'key': 'serviceLocation', 'type': 'str'}, } @@ -1803,38 +2436,182 @@ def __init__( self, **kwargs ): - super(DestinationToServiceLocationMap, self).__init__(**kwargs) - self.destination_location = None + super(DataLocationToServiceLocationMap, self).__init__(**kwargs) + self.data_location = None self.service_location = None +class DataTransferDetailsValidationRequest(ValidationInputRequest): + """Request to validate export and import data details. + + All required parameters must be populated in order to send to Azure. + + :param validation_type: Required. Identifies the type of validation request.Constant filled by + server. Possible values include: "ValidateAddress", + "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + :param data_export_details: List of DataTransfer details to be used to export data from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] + :param data_import_details: List of DataTransfer details to be used to import data to azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param device_type: Required. Device type. Possible values include: "DataBox", "DataBoxDisk", + "DataBoxHeavy", "DataBoxCustomerDisk". + :type device_type: str or ~data_box_management_client.models.SkuName + :param transfer_type: Required. Type of the transfer. Possible values include: "ImportToAzure", + "ExportFromAzure". + :type transfer_type: str or ~data_box_management_client.models.TransferType + """ + + _validation = { + 'validation_type': {'required': True}, + 'device_type': {'required': True}, + 'transfer_type': {'required': True}, + } + + _attribute_map = { + 'validation_type': {'key': 'validationType', 'type': 'str'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'device_type': {'key': 'deviceType', 'type': 'str'}, + 'transfer_type': {'key': 'transferType', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DataTransferDetailsValidationRequest, self).__init__(**kwargs) + self.validation_type = 'ValidateDataTransferDetails' # type: str + self.data_export_details = kwargs.get('data_export_details', None) + self.data_import_details = kwargs.get('data_import_details', None) + self.device_type = kwargs['device_type'] + self.transfer_type = kwargs['transfer_type'] + + +class DataTransferDetailsValidationResponseProperties(ValidationInputResponse): + """Properties of data transfer details validation response. + + Variables are only populated by the server, and will be ignored when sending a request. 
+ + All required parameters must be populated in order to send to Azure. + + :param validation_type: Required. Identifies the type of validation response.Constant filled by + server. Possible values include: "ValidateAddress", + "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + :ivar error: Error code and message of validation response. + :vartype error: ~data_box_management_client.models.CloudError + :ivar status: Data transfer details validation status. Possible values include: "Valid", + "Invalid", "Skipped". + :vartype status: str or ~data_box_management_client.models.ValidationStatus + """ + + _validation = { + 'validation_type': {'required': True}, + 'error': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'validation_type': {'key': 'validationType', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'CloudError'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DataTransferDetailsValidationResponseProperties, self).__init__(**kwargs) + self.validation_type = 'ValidateDataTransferDetails' # type: str + self.status = None + + +class DcAccessSecurityCode(msrest.serialization.Model): + """Dc access security code. + + :param reverse_dc_access_code: Reverse Dc access security code. + :type reverse_dc_access_code: str + :param forward_dc_access_code: Forward Dc access security code. + :type forward_dc_access_code: str + """ + + _attribute_map = { + 'reverse_dc_access_code': {'key': 'reverseDCAccessCode', 'type': 'str'}, + 'forward_dc_access_code': {'key': 'forwardDCAccessCode', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DcAccessSecurityCode, self).__init__(**kwargs) + self.reverse_dc_access_code = kwargs.get('reverse_dc_access_code', None) + self.forward_dc_access_code = kwargs.get('forward_dc_access_code', None) + + +class Details(msrest.serialization.Model): + """Details. + + All required parameters must be populated in order to send to Azure. + + :param code: Required. + :type code: str + :param message: Required. + :type message: str + """ + + _validation = { + 'code': {'required': True}, + 'message': {'required': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Details, self).__init__(**kwargs) + self.code = kwargs['code'] + self.message = kwargs['message'] + + class DiskScheduleAvailabilityRequest(ScheduleAvailabilityRequest): """Request body to get the availability for scheduling disk orders. All required parameters must be populated in order to send to Azure. - :param storage_location: Required. Location for data transfer. - For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api- - version=2018-01-01. + :param storage_location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. :type storage_location: str :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by - server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type sku_name: str or ~azure.mgmt.databox.models.SkuName - :param expected_data_size_in_terabytes: Required. 
The expected size of the data, which needs to - be transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int + server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type sku_name: str or ~data_box_management_client.models.SkuName + :param country: Country in which storage location should be supported. + :type country: str + :param expected_data_size_in_tera_bytes: Required. The expected size of the data, which needs + to be transferred in this job, in terabytes. + :type expected_data_size_in_tera_bytes: int """ _validation = { 'storage_location': {'required': True}, 'sku_name': {'required': True}, - 'expected_data_size_in_terabytes': {'required': True}, + 'expected_data_size_in_tera_bytes': {'required': True}, } _attribute_map = { 'storage_location': {'key': 'storageLocation', 'type': 'str'}, 'sku_name': {'key': 'skuName', 'type': 'str'}, - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, + 'country': {'key': 'country', 'type': 'str'}, + 'expected_data_size_in_tera_bytes': {'key': 'expectedDataSizeInTeraBytes', 'type': 'int'}, } def __init__( @@ -1843,7 +2620,7 @@ def __init__( ): super(DiskScheduleAvailabilityRequest, self).__init__(**kwargs) self.sku_name = 'DataBoxDisk' # type: str - self.expected_data_size_in_terabytes = kwargs['expected_data_size_in_terabytes'] + self.expected_data_size_in_tera_bytes = kwargs['expected_data_size_in_tera_bytes'] class DiskSecret(msrest.serialization.Model): @@ -1877,34 +2654,129 @@ def __init__( self.bit_locker_key = None -class Error(msrest.serialization.Model): - """Top level error for the job. +class EncryptionPreferences(msrest.serialization.Model): + """Preferences related to the Encryption. - Variables are only populated by the server, and will be ignored when sending a request. + :param double_encryption: Defines secondary layer of software-based encryption enablement. + Possible values include: "Enabled", "Disabled". Default value: "Disabled". + :type double_encryption: str or ~data_box_management_client.models.DoubleEncryption + """ - :ivar code: Error code that can be used to programmatically identify the error. - :vartype code: str - :ivar message: Describes the error in detail and provides debugging information. - :vartype message: str + _attribute_map = { + 'double_encryption': {'key': 'doubleEncryption', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(EncryptionPreferences, self).__init__(**kwargs) + self.double_encryption = kwargs.get('double_encryption', "Disabled") + + +class ErrorDetail(msrest.serialization.Model): + """ErrorDetail. + + All required parameters must be populated in order to send to Azure. + + :param code: Required. + :type code: str + :param message: Required. 
+ :type message: str + :param details: + :type details: list[~data_box_management_client.models.Details] + :param target: + :type target: str """ _validation = { - 'code': {'readonly': True}, - 'message': {'readonly': True}, + 'code': {'required': True}, + 'message': {'required': True}, } _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[Details]'}, + 'target': {'key': 'target', 'type': 'str'}, } def __init__( self, **kwargs ): - super(Error, self).__init__(**kwargs) - self.code = None - self.message = None + super(ErrorDetail, self).__init__(**kwargs) + self.code = kwargs['code'] + self.message = kwargs['message'] + self.details = kwargs.get('details', None) + self.target = kwargs.get('target', None) + + +class ExportDiskDetails(msrest.serialization.Model): + """Export disk details. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar manifest_file: Manifest file of drive. + :vartype manifest_file: str + :ivar manifest_hash: Manifest file of drive. + :vartype manifest_hash: str + :ivar disk_hash: Hash of the disk. + :vartype disk_hash: str + """ + + _validation = { + 'manifest_file': {'readonly': True}, + 'manifest_hash': {'readonly': True}, + 'disk_hash': {'readonly': True}, + } + + _attribute_map = { + 'manifest_file': {'key': 'manifestFile', 'type': 'str'}, + 'manifest_hash': {'key': 'manifestHash', 'type': 'str'}, + 'disk_hash': {'key': 'diskHash', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ExportDiskDetails, self).__init__(**kwargs) + self.manifest_file = None + self.manifest_hash = None + self.disk_hash = None + + +class FilterFileDetails(msrest.serialization.Model): + """Details of the filter files to be used for data transfer. + + All required parameters must be populated in order to send to Azure. + + :param filter_file_type: Required. Type of the filter file. Possible values include: + "AzureBlob", "AzureFile". + :type filter_file_type: str or ~data_box_management_client.models.FilterFileType + :param filter_file_path: Required. Path of the file that contains the details of all items to + transfer. + :type filter_file_path: str + """ + + _validation = { + 'filter_file_type': {'required': True}, + 'filter_file_path': {'required': True}, + } + + _attribute_map = { + 'filter_file_type': {'key': 'filterFileType', 'type': 'str'}, + 'filter_file_path': {'key': 'filterFilePath', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(FilterFileDetails, self).__init__(**kwargs) + self.filter_file_type = kwargs['filter_file_type'] + self.filter_file_path = kwargs['filter_file_path'] class HeavyScheduleAvailabilityRequest(ScheduleAvailabilityRequest): @@ -1912,13 +2784,15 @@ class HeavyScheduleAvailabilityRequest(ScheduleAvailabilityRequest): All required parameters must be populated in order to send to Azure. - :param storage_location: Required. Location for data transfer. - For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api- - version=2018-01-01. + :param storage_location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. :type storage_location: str :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by - server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". 
- :type sku_name: str or ~azure.mgmt.databox.models.SkuName + server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type sku_name: str or ~data_box_management_client.models.SkuName + :param country: Country in which storage location should be supported. + :type country: str """ _validation = { @@ -1929,6 +2803,7 @@ class HeavyScheduleAvailabilityRequest(ScheduleAvailabilityRequest): _attribute_map = { 'storage_location': {'key': 'storageLocation', 'type': 'str'}, 'sku_name': {'key': 'skuName', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, } def __init__( @@ -1939,63 +2814,86 @@ def __init__( self.sku_name = 'DataBoxHeavy' # type: str -class JobDeliveryInfo(msrest.serialization.Model): - """Additional delivery info. +class IdentityProperties(msrest.serialization.Model): + """Managed identity properties. - :param scheduled_date_time: Scheduled date time. - :type scheduled_date_time: ~datetime.datetime + :param type: Managed service identity type. + :type type: str + :param user_assigned: User assigned identity properties. + :type user_assigned: ~data_box_management_client.models.UserAssignedProperties """ _attribute_map = { - 'scheduled_date_time': {'key': 'scheduledDateTime', 'type': 'iso-8601'}, + 'type': {'key': 'type', 'type': 'str'}, + 'user_assigned': {'key': 'userAssigned', 'type': 'UserAssignedProperties'}, } def __init__( self, **kwargs ): - super(JobDeliveryInfo, self).__init__(**kwargs) - self.scheduled_date_time = kwargs.get('scheduled_date_time', None) + super(IdentityProperties, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + self.user_assigned = kwargs.get('user_assigned', None) -class JobErrorDetails(msrest.serialization.Model): - """Job Error Details for providing the information and recommended action. +class ImportDiskDetails(msrest.serialization.Model): + """Import disk details. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar error_message: Message for the error. - :vartype error_message: str - :ivar error_code: Code for the error. - :vartype error_code: int - :ivar recommended_action: Recommended action for the error. - :vartype recommended_action: str - :ivar exception_message: Contains the non localized exception message. - :vartype exception_message: str + :param manifest_file: Required. Manifest file of drive. + :type manifest_file: str + :param manifest_hash: Required. Manifest file of drive. + :type manifest_hash: str + :param bit_locker_key: Required. BitLocker key of drive. + :type bit_locker_key: str + :param disk_hash: Required. Hash of the disk. 
+ :type disk_hash: str """ _validation = { - 'error_message': {'readonly': True}, - 'error_code': {'readonly': True}, - 'recommended_action': {'readonly': True}, - 'exception_message': {'readonly': True}, + 'manifest_file': {'required': True}, + 'manifest_hash': {'required': True}, + 'bit_locker_key': {'required': True}, + 'disk_hash': {'required': True}, } _attribute_map = { - 'error_message': {'key': 'errorMessage', 'type': 'str'}, - 'error_code': {'key': 'errorCode', 'type': 'int'}, - 'recommended_action': {'key': 'recommendedAction', 'type': 'str'}, - 'exception_message': {'key': 'exceptionMessage', 'type': 'str'}, + 'manifest_file': {'key': 'manifestFile', 'type': 'str'}, + 'manifest_hash': {'key': 'manifestHash', 'type': 'str'}, + 'bit_locker_key': {'key': 'bitLockerKey', 'type': 'str'}, + 'disk_hash': {'key': 'diskHash', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ImportDiskDetails, self).__init__(**kwargs) + self.manifest_file = kwargs['manifest_file'] + self.manifest_hash = kwargs['manifest_hash'] + self.bit_locker_key = kwargs['bit_locker_key'] + self.disk_hash = kwargs['disk_hash'] + + +class JobDeliveryInfo(msrest.serialization.Model): + """Additional delivery info. + + :param scheduled_date_time: Scheduled date time. + :type scheduled_date_time: ~datetime.datetime + """ + + _attribute_map = { + 'scheduled_date_time': {'key': 'scheduledDateTime', 'type': 'iso-8601'}, } def __init__( self, **kwargs ): - super(JobErrorDetails, self).__init__(**kwargs) - self.error_message = None - self.error_code = None - self.recommended_action = None - self.exception_message = None + super(JobDeliveryInfo, self).__init__(**kwargs) + self.scheduled_date_time = kwargs.get('scheduled_date_time', None) class Resource(msrest.serialization.Model): @@ -2012,7 +2910,9 @@ class Resource(msrest.serialization.Model): can be used in viewing and grouping this resource (across resource groups). :type tags: dict[str, str] :param sku: Required. The sku type. - :type sku: ~azure.mgmt.databox.models.Sku + :type sku: ~data_box_management_client.models.Sku + :param identity: Msi identity of the resource. + :type identity: ~data_box_management_client.models.ResourceIdentity """ _validation = { @@ -2024,6 +2924,7 @@ class Resource(msrest.serialization.Model): 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'sku': {'key': 'sku', 'type': 'Sku'}, + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, } def __init__( @@ -2034,6 +2935,7 @@ def __init__( self.location = kwargs['location'] self.tags = kwargs.get('tags', None) self.sku = kwargs['sku'] + self.identity = kwargs.get('identity', None) class JobResource(Resource): @@ -2052,38 +2954,47 @@ class JobResource(Resource): can be used in viewing and grouping this resource (across resource groups). :type tags: dict[str, str] :param sku: Required. The sku type. - :type sku: ~azure.mgmt.databox.models.Sku + :type sku: ~data_box_management_client.models.Sku + :param identity: Msi identity of the resource. + :type identity: ~data_box_management_client.models.ResourceIdentity :ivar name: Name of the object. :vartype name: str :ivar id: Id of the object. :vartype id: str :ivar type: Type of the object. :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~data_box_management_client.models.SystemData + :param transfer_type: Required. Type of the data transfer. Possible values include: + "ImportToAzure", "ExportFromAzure". 
+    :type transfer_type: str or ~data_box_management_client.models.TransferType
     :ivar is_cancellable: Describes whether the job is cancellable or not.
     :vartype is_cancellable: bool
     :ivar is_deletable: Describes whether the job is deletable or not.
     :vartype is_deletable: bool
     :ivar is_shipping_address_editable: Describes whether the shipping address is editable or not.
     :vartype is_shipping_address_editable: bool
+    :ivar is_prepare_to_ship_enabled: Is Prepare To Ship Enabled on this job.
+    :vartype is_prepare_to_ship_enabled: bool
     :ivar status: Name of the stage which is in progress. Possible values include:
      "DeviceOrdered", "DevicePrepared", "Dispatched", "Delivered", "PickedUp", "AtAzureDC",
      "DataCopy", "Completed", "CompletedWithErrors", "Cancelled", "Failed_IssueReportedAtCustomer",
      "Failed_IssueDetectedAtAzureDC", "Aborted", "CompletedWithWarnings",
-     "ReadyToDispatchFromAzureDC", "ReadyToReceiveAtAzureDC".
-    :vartype status: str or ~azure.mgmt.databox.models.StageName
+     "ReadyToDispatchFromAzureDC", "ReadyToReceiveAtAzureDC", "Created", "Shipping", "Packaging".
+    :vartype status: str or ~data_box_management_client.models.StageName
     :ivar start_time: Time at which the job was started in UTC ISO 8601 format.
     :vartype start_time: ~datetime.datetime
     :ivar error: Top level error for the job.
-    :vartype error: ~azure.mgmt.databox.models.Error
+    :vartype error: ~data_box_management_client.models.CloudError
     :param details: Details of a job run. This field will only be sent for expand details filter.
-    :type details: ~azure.mgmt.databox.models.JobDetails
+    :type details: ~data_box_management_client.models.JobDetails
     :ivar cancellation_reason: Reason for cancellation.
     :vartype cancellation_reason: str
     :param delivery_type: Delivery type of Job. Possible values include: "NonScheduled",
-     "Scheduled".
-    :type delivery_type: str or ~azure.mgmt.databox.models.JobDeliveryType
+     "Scheduled". Default value: "NonScheduled".
+    :type delivery_type: str or ~data_box_management_client.models.JobDeliveryType
     :param delivery_info: Delivery Info of Job.
-    :type delivery_info: ~azure.mgmt.databox.models.JobDeliveryInfo
+    :type delivery_info: ~data_box_management_client.models.JobDeliveryInfo
     :ivar is_cancellable_without_fee: Flag to indicate cancellation of scheduled job.
:vartype is_cancellable_without_fee: bool """ @@ -2094,9 +3005,12 @@ class JobResource(Resource): 'name': {'readonly': True}, 'id': {'readonly': True}, 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'transfer_type': {'required': True}, 'is_cancellable': {'readonly': True}, 'is_deletable': {'readonly': True}, 'is_shipping_address_editable': {'readonly': True}, + 'is_prepare_to_ship_enabled': {'readonly': True}, 'status': {'readonly': True}, 'start_time': {'readonly': True}, 'error': {'readonly': True}, @@ -2108,15 +3022,19 @@ class JobResource(Resource): 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'sku': {'key': 'sku', 'type': 'Sku'}, + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, 'name': {'key': 'name', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'transfer_type': {'key': 'properties.transferType', 'type': 'str'}, 'is_cancellable': {'key': 'properties.isCancellable', 'type': 'bool'}, 'is_deletable': {'key': 'properties.isDeletable', 'type': 'bool'}, 'is_shipping_address_editable': {'key': 'properties.isShippingAddressEditable', 'type': 'bool'}, + 'is_prepare_to_ship_enabled': {'key': 'properties.isPrepareToShipEnabled', 'type': 'bool'}, 'status': {'key': 'properties.status', 'type': 'str'}, 'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'}, - 'error': {'key': 'properties.error', 'type': 'Error'}, + 'error': {'key': 'properties.error', 'type': 'CloudError'}, 'details': {'key': 'properties.details', 'type': 'JobDetails'}, 'cancellation_reason': {'key': 'properties.cancellationReason', 'type': 'str'}, 'delivery_type': {'key': 'properties.deliveryType', 'type': 'str'}, @@ -2132,15 +3050,18 @@ def __init__( self.name = None self.id = None self.type = None + self.system_data = None + self.transfer_type = kwargs['transfer_type'] self.is_cancellable = None self.is_deletable = None self.is_shipping_address_editable = None + self.is_prepare_to_ship_enabled = None self.status = None self.start_time = None self.error = None self.details = kwargs.get('details', None) self.cancellation_reason = None - self.delivery_type = kwargs.get('delivery_type', None) + self.delivery_type = kwargs.get('delivery_type', "NonScheduled") self.delivery_info = kwargs.get('delivery_info', None) self.is_cancellable_without_fee = None @@ -2149,106 +3070,268 @@ class JobResourceList(msrest.serialization.Model): """Job Resource Collection. :param value: List of job resources. - :type value: list[~azure.mgmt.databox.models.JobResource] + :type value: list[~data_box_management_client.models.JobResource] :param next_link: Link for the next set of job resources. :type next_link: str """ _attribute_map = { - 'value': {'key': 'value', 'type': '[JobResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[JobResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(JobResourceList, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) + + +class JobResourceUpdateParameter(msrest.serialization.Model): + """The JobResourceUpdateParameter. + + :param tags: A set of tags. The list of key value pairs that describe the resource. These tags + can be used in viewing and grouping this resource (across resource groups). 
+ :type tags: dict[str, str] + :param identity: Msi identity of the resource. + :type identity: ~data_box_management_client.models.ResourceIdentity + :param details: Details of a job to be updated. + :type details: ~data_box_management_client.models.UpdateJobDetails + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, + 'details': {'key': 'properties.details', 'type': 'UpdateJobDetails'}, + } + + def __init__( + self, + **kwargs + ): + super(JobResourceUpdateParameter, self).__init__(**kwargs) + self.tags = kwargs.get('tags', None) + self.identity = kwargs.get('identity', None) + self.details = kwargs.get('details', None) + + +class JobStages(msrest.serialization.Model): + """Job stages. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar stage_name: Name of the job stage. Possible values include: "DeviceOrdered", + "DevicePrepared", "Dispatched", "Delivered", "PickedUp", "AtAzureDC", "DataCopy", "Completed", + "CompletedWithErrors", "Cancelled", "Failed_IssueReportedAtCustomer", + "Failed_IssueDetectedAtAzureDC", "Aborted", "CompletedWithWarnings", + "ReadyToDispatchFromAzureDC", "ReadyToReceiveAtAzureDC", "Created", "Shipping", "Packaging". + :vartype stage_name: str or ~data_box_management_client.models.StageName + :ivar display_name: Display name of the job stage. + :vartype display_name: str + :ivar stage_status: Status of the job stage. Possible values include: "None", "InProgress", + "Succeeded", "Failed", "Cancelled", "Cancelling", "SucceededWithErrors", + "WaitingForCustomerAction", "SucceededWithWarnings", "WaitingForCustomerActionForKek", + "WaitingForCustomerActionForCleanUp", "CustomerActionPerformedForCleanUp". + :vartype stage_status: str or ~data_box_management_client.models.StageStatus + :ivar stage_time: Time for the job stage in UTC ISO 8601 format. + :vartype stage_time: ~datetime.datetime + :ivar job_stage_details: Job Stage Details. + :vartype job_stage_details: object + """ + + _validation = { + 'stage_name': {'readonly': True}, + 'display_name': {'readonly': True}, + 'stage_status': {'readonly': True}, + 'stage_time': {'readonly': True}, + 'job_stage_details': {'readonly': True}, + } + + _attribute_map = { + 'stage_name': {'key': 'stageName', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'stage_status': {'key': 'stageStatus', 'type': 'str'}, + 'stage_time': {'key': 'stageTime', 'type': 'iso-8601'}, + 'job_stage_details': {'key': 'jobStageDetails', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(JobStages, self).__init__(**kwargs) + self.stage_name = None + self.display_name = None + self.stage_status = None + self.stage_time = None + self.job_stage_details = None + + +class KeyEncryptionKey(msrest.serialization.Model): + """Encryption key containing details about key to encrypt different keys. + + All required parameters must be populated in order to send to Azure. + + :param kek_type: Required. Type of encryption key used for key encryption. Possible values + include: "MicrosoftManaged", "CustomerManaged". Default value: "MicrosoftManaged". + :type kek_type: str or ~data_box_management_client.models.KekType + :param identity_properties: Managed identity properties used for key encryption. + :type identity_properties: ~data_box_management_client.models.IdentityProperties + :param kek_url: Key encryption key. It is required in case of Customer managed KekType. 
+ :type kek_url: str + :param kek_vault_resource_id: Kek vault resource id. It is required in case of Customer managed + KekType. + :type kek_vault_resource_id: str + """ + + _validation = { + 'kek_type': {'required': True}, + } + + _attribute_map = { + 'kek_type': {'key': 'kekType', 'type': 'str'}, + 'identity_properties': {'key': 'identityProperties', 'type': 'IdentityProperties'}, + 'kek_url': {'key': 'kekUrl', 'type': 'str'}, + 'kek_vault_resource_id': {'key': 'kekVaultResourceID', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(KeyEncryptionKey, self).__init__(**kwargs) + self.kek_type = kwargs.get('kek_type', "MicrosoftManaged") + self.identity_properties = kwargs.get('identity_properties', None) + self.kek_url = kwargs.get('kek_url', None) + self.kek_vault_resource_id = kwargs.get('kek_vault_resource_id', None) + + +class LastMitigationActionOnJob(msrest.serialization.Model): + """Last Mitigation Action Performed On Job. + + :param action_date_time_in_utc: Action performed date time. + :type action_date_time_in_utc: ~datetime.datetime + :param is_performed_by_customer: Action performed by customer, + possibility is that mitigation might happen by customer or service or by ops. + :type is_performed_by_customer: bool + :param customer_resolution: Resolution code provided by customer. Possible values include: + "None", "MoveToCleanUpDevice", "Resume". + :type customer_resolution: str or ~data_box_management_client.models.CustomerResolutionCode + """ + + _attribute_map = { + 'action_date_time_in_utc': {'key': 'actionDateTimeInUtc', 'type': 'iso-8601'}, + 'is_performed_by_customer': {'key': 'isPerformedByCustomer', 'type': 'bool'}, + 'customer_resolution': {'key': 'customerResolution', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(LastMitigationActionOnJob, self).__init__(**kwargs) + self.action_date_time_in_utc = kwargs.get('action_date_time_in_utc', None) + self.is_performed_by_customer = kwargs.get('is_performed_by_customer', None) + self.customer_resolution = kwargs.get('customer_resolution', None) + + +class ManagedDiskDetails(DataAccountDetails): + """Details of the managed disks. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Account Type of the data to be transferred.Constant filled + by server. Possible values include: "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param share_password: Password for all the shares to be created on the device. Should not be + passed for TransferType:ExportFromAzure jobs. If this is not passed, the service will generate + password itself. This will not be returned in Get Call. Password Requirements : Password must + be minimum of 12 and maximum of 64 characters. Password must have at least one uppercase + alphabet, one number and one special character. Password cannot have the following characters : + IilLoO0 Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. + :type share_password: str + :param resource_group_id: Required. Resource Group Id of the compute disks. + :type resource_group_id: str + :param staging_storage_account_id: Required. Resource Id of the storage account that can be + used to copy the vhd for staging. 
+ :type staging_storage_account_id: str + """ + + _validation = { + 'data_account_type': {'required': True}, + 'resource_group_id': {'required': True}, + 'staging_storage_account_id': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'share_password': {'key': 'sharePassword', 'type': 'str'}, + 'resource_group_id': {'key': 'resourceGroupId', 'type': 'str'}, + 'staging_storage_account_id': {'key': 'stagingStorageAccountId', 'type': 'str'}, } def __init__( self, **kwargs ): - super(JobResourceList, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) + super(ManagedDiskDetails, self).__init__(**kwargs) + self.data_account_type = 'ManagedDisk' # type: str + self.resource_group_id = kwargs['resource_group_id'] + self.staging_storage_account_id = kwargs['staging_storage_account_id'] -class JobResourceUpdateParameter(msrest.serialization.Model): - """The JobResourceUpdateParameter. +class MarkDevicesShippedRequest(msrest.serialization.Model): + """The request body to provide the delivery package details of job. - :param tags: A set of tags. The list of key value pairs that describe the resource. These tags - can be used in viewing and grouping this resource (across resource groups). - :type tags: dict[str, str] - :param details: Details of a job to be updated. - :type details: ~azure.mgmt.databox.models.UpdateJobDetails - :param destination_account_details: Destination account details. - :type destination_account_details: list[~azure.mgmt.databox.models.DestinationAccountDetails] + All required parameters must be populated in order to send to Azure. + + :param delivery_package_details: Required. Delivery package details. + :type delivery_package_details: ~data_box_management_client.models.PackageCarrierInfo """ + _validation = { + 'delivery_package_details': {'required': True}, + } + _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - 'details': {'key': 'properties.details', 'type': 'UpdateJobDetails'}, - 'destination_account_details': {'key': 'properties.destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, + 'delivery_package_details': {'key': 'deliveryPackageDetails', 'type': 'PackageCarrierInfo'}, } def __init__( self, **kwargs ): - super(JobResourceUpdateParameter, self).__init__(**kwargs) - self.tags = kwargs.get('tags', None) - self.details = kwargs.get('details', None) - self.destination_account_details = kwargs.get('destination_account_details', None) + super(MarkDevicesShippedRequest, self).__init__(**kwargs) + self.delivery_package_details = kwargs['delivery_package_details'] -class JobStages(msrest.serialization.Model): - """Job stages. +class MitigateJobRequest(msrest.serialization.Model): + """The Mitigate Job captured from request body for Mitigate API. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar stage_name: Name of the job stage. Possible values include: "DeviceOrdered", - "DevicePrepared", "Dispatched", "Delivered", "PickedUp", "AtAzureDC", "DataCopy", "Completed", - "CompletedWithErrors", "Cancelled", "Failed_IssueReportedAtCustomer", - "Failed_IssueDetectedAtAzureDC", "Aborted", "CompletedWithWarnings", - "ReadyToDispatchFromAzureDC", "ReadyToReceiveAtAzureDC". - :vartype stage_name: str or ~azure.mgmt.databox.models.StageName - :ivar display_name: Display name of the job stage. 
- :vartype display_name: str - :ivar stage_status: Status of the job stage. Possible values include: "None", "InProgress", - "Succeeded", "Failed", "Cancelled", "Cancelling", "SucceededWithErrors". - :vartype stage_status: str or ~azure.mgmt.databox.models.StageStatus - :ivar stage_time: Time for the job stage in UTC ISO 8601 format. - :vartype stage_time: ~datetime.datetime - :ivar job_stage_details: Job Stage Details. - :vartype job_stage_details: object - :ivar error_details: Error details for the stage. - :vartype error_details: list[~azure.mgmt.databox.models.JobErrorDetails] + :param customer_resolution_code: Required. Resolution code for the job. Possible values + include: "None", "MoveToCleanUpDevice", "Resume". + :type customer_resolution_code: str or + ~data_box_management_client.models.CustomerResolutionCode """ _validation = { - 'stage_name': {'readonly': True}, - 'display_name': {'readonly': True}, - 'stage_status': {'readonly': True}, - 'stage_time': {'readonly': True}, - 'job_stage_details': {'readonly': True}, - 'error_details': {'readonly': True}, + 'customer_resolution_code': {'required': True}, } _attribute_map = { - 'stage_name': {'key': 'stageName', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'stage_status': {'key': 'stageStatus', 'type': 'str'}, - 'stage_time': {'key': 'stageTime', 'type': 'iso-8601'}, - 'job_stage_details': {'key': 'jobStageDetails', 'type': 'object'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, + 'customer_resolution_code': {'key': 'customerResolutionCode', 'type': 'str'}, } def __init__( self, **kwargs ): - super(JobStages, self).__init__(**kwargs) - self.stage_name = None - self.display_name = None - self.stage_status = None - self.stage_time = None - self.job_stage_details = None - self.error_details = None + super(MitigateJobRequest, self).__init__(**kwargs) + self.customer_resolution_code = kwargs['customer_resolution_code'] class NotificationPreference(msrest.serialization.Model): @@ -2258,7 +3341,7 @@ class NotificationPreference(msrest.serialization.Model): :param stage_name: Required. Name of the stage. Possible values include: "DevicePrepared", "Dispatched", "Delivered", "PickedUp", "AtAzureDC", "DataCopy". - :type stage_name: str or ~azure.mgmt.databox.models.NotificationStageName + :type stage_name: str or ~data_box_management_client.models.NotificationStageName :param send_notification: Required. Notification is required or not. :type send_notification: bool """ @@ -2279,7 +3362,7 @@ def __init__( ): super(NotificationPreference, self).__init__(**kwargs) self.stage_name = kwargs['stage_name'] - self.send_notification = kwargs['send_notification'] + self.send_notification = kwargs.get('send_notification', True) class Operation(msrest.serialization.Model): @@ -2291,11 +3374,13 @@ class Operation(msrest.serialization.Model): {resourceProviderNamespace}/{resourceType}/{read|write|delete|action}. :vartype name: str :ivar display: Operation display values. - :vartype display: ~azure.mgmt.databox.models.OperationDisplay + :vartype display: ~data_box_management_client.models.OperationDisplay :ivar properties: Operation properties. :vartype properties: object :ivar origin: Origin of the operation. Can be : user|system|user,system. :vartype origin: str + :param is_data_action: Indicates whether the operation is a data action. 
+ :type is_data_action: bool """ _validation = { @@ -2310,6 +3395,7 @@ class Operation(msrest.serialization.Model): 'display': {'key': 'display', 'type': 'OperationDisplay'}, 'properties': {'key': 'properties', 'type': 'object'}, 'origin': {'key': 'origin', 'type': 'str'}, + 'is_data_action': {'key': 'isDataAction', 'type': 'bool'}, } def __init__( @@ -2321,6 +3407,7 @@ def __init__( self.display = None self.properties = None self.origin = None + self.is_data_action = kwargs.get('is_data_action', None) class OperationDisplay(msrest.serialization.Model): @@ -2360,7 +3447,7 @@ class OperationList(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. :ivar value: List of operations. - :vartype value: list[~azure.mgmt.databox.models.Operation] + :vartype value: list[~data_box_management_client.models.Operation] :param next_link: Link for the next set of operations. :type next_link: str """ @@ -2383,29 +3470,77 @@ def __init__( self.next_link = kwargs.get('next_link', None) +class PackageCarrierDetails(msrest.serialization.Model): + """Package carrier details. + + :param carrier_account_number: Carrier Account Number of customer for customer disk. + :type carrier_account_number: str + :param carrier_name: Name of the carrier. + :type carrier_name: str + :param tracking_id: Tracking Id of shipment. + :type tracking_id: str + """ + + _attribute_map = { + 'carrier_account_number': {'key': 'carrierAccountNumber', 'type': 'str'}, + 'carrier_name': {'key': 'carrierName', 'type': 'str'}, + 'tracking_id': {'key': 'trackingId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PackageCarrierDetails, self).__init__(**kwargs) + self.carrier_account_number = kwargs.get('carrier_account_number', None) + self.carrier_name = kwargs.get('carrier_name', None) + self.tracking_id = kwargs.get('tracking_id', None) + + +class PackageCarrierInfo(msrest.serialization.Model): + """package carrier info. + + :param carrier_name: Name of the carrier. + :type carrier_name: str + :param tracking_id: Tracking Id of shipment. + :type tracking_id: str + """ + + _attribute_map = { + 'carrier_name': {'key': 'carrierName', 'type': 'str'}, + 'tracking_id': {'key': 'trackingId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PackageCarrierInfo, self).__init__(**kwargs) + self.carrier_name = kwargs.get('carrier_name', None) + self.tracking_id = kwargs.get('tracking_id', None) + + class PackageShippingDetails(msrest.serialization.Model): - """Shipping details. + """package shipping details. Variables are only populated by the server, and will be ignored when sending a request. - :ivar carrier_name: Name of the carrier. - :vartype carrier_name: str - :ivar tracking_id: Tracking Id of shipment. - :vartype tracking_id: str :ivar tracking_url: Url where shipment can be tracked. :vartype tracking_url: str + :param carrier_name: Name of the carrier. + :type carrier_name: str + :param tracking_id: Tracking Id of shipment. 
+ :type tracking_id: str """ _validation = { - 'carrier_name': {'readonly': True}, - 'tracking_id': {'readonly': True}, 'tracking_url': {'readonly': True}, } _attribute_map = { + 'tracking_url': {'key': 'trackingUrl', 'type': 'str'}, 'carrier_name': {'key': 'carrierName', 'type': 'str'}, 'tracking_id': {'key': 'trackingId', 'type': 'str'}, - 'tracking_url': {'key': 'trackingUrl', 'type': 'str'}, } def __init__( @@ -2413,23 +3548,26 @@ def __init__( **kwargs ): super(PackageShippingDetails, self).__init__(**kwargs) - self.carrier_name = None - self.tracking_id = None self.tracking_url = None + self.carrier_name = kwargs.get('carrier_name', None) + self.tracking_id = kwargs.get('tracking_id', None) class Preferences(msrest.serialization.Model): """Preferences related to the order. - :param preferred_data_center_region: Preferred Data Center Region. + :param preferred_data_center_region: Preferred data center region. :type preferred_data_center_region: list[str] :param transport_preferences: Preferences related to the shipment logistics of the sku. - :type transport_preferences: ~azure.mgmt.databox.models.TransportPreferences + :type transport_preferences: ~data_box_management_client.models.TransportPreferences + :param encryption_preferences: Preferences related to the Encryption. + :type encryption_preferences: ~data_box_management_client.models.EncryptionPreferences """ _attribute_map = { 'preferred_data_center_region': {'key': 'preferredDataCenterRegion', 'type': '[str]'}, 'transport_preferences': {'key': 'transportPreferences', 'type': 'TransportPreferences'}, + 'encryption_preferences': {'key': 'encryptionPreferences', 'type': 'EncryptionPreferences'}, } def __init__( @@ -2439,6 +3577,7 @@ def __init__( super(Preferences, self).__init__(**kwargs) self.preferred_data_center_region = kwargs.get('preferred_data_center_region', None) self.transport_preferences = kwargs.get('transport_preferences', None) + self.encryption_preferences = kwargs.get('encryption_preferences', None) class PreferencesValidationRequest(ValidationInputRequest): @@ -2447,15 +3586,15 @@ class PreferencesValidationRequest(ValidationInputRequest): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator - :param preference: Preference requested with respect to transport type and data center. - :type preference: ~azure.mgmt.databox.models.Preferences + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + :param preference: Preference of transport and data center. + :type preference: ~data_box_management_client.models.Preferences :param device_type: Required. Device type to be used for the job. Possible values include: - "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type device_type: str or ~azure.mgmt.databox.models.SkuName + "DataBox", "DataBoxDisk", "DataBoxHeavy", "DataBoxCustomerDisk". 
+ :type device_type: str or ~data_box_management_client.models.SkuName """ _validation = { @@ -2487,15 +3626,15 @@ class PreferencesValidationResponseProperties(ValidationInputResponse): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar status: Validation status of requested data center and transport. Possible values include: "Valid", "Invalid", "Skipped". - :vartype status: str or ~azure.mgmt.databox.models.ValidationStatus + :vartype status: str or ~data_box_management_client.models.ValidationStatus """ _validation = { @@ -2506,7 +3645,7 @@ class PreferencesValidationResponseProperties(ValidationInputResponse): _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'status': {'key': 'status', 'type': 'str'}, } @@ -2524,15 +3663,20 @@ class RegionConfigurationRequest(msrest.serialization.Model): :param schedule_availability_request: Request body to get the availability for scheduling orders. - :type schedule_availability_request: ~azure.mgmt.databox.models.ScheduleAvailabilityRequest + :type schedule_availability_request: + ~data_box_management_client.models.ScheduleAvailabilityRequest :param transport_availability_request: Request body to get the transport availability for given sku. - :type transport_availability_request: ~azure.mgmt.databox.models.TransportAvailabilityRequest + :type transport_availability_request: + ~data_box_management_client.models.TransportAvailabilityRequest + :param datacenter_address_request: Request body to get the datacenter address . + :type datacenter_address_request: ~data_box_management_client.models.DatacenterAddressRequest """ _attribute_map = { 'schedule_availability_request': {'key': 'scheduleAvailabilityRequest', 'type': 'ScheduleAvailabilityRequest'}, 'transport_availability_request': {'key': 'transportAvailabilityRequest', 'type': 'TransportAvailabilityRequest'}, + 'datacenter_address_request': {'key': 'datacenterAddressRequest', 'type': 'DatacenterAddressRequest'}, } def __init__( @@ -2542,6 +3686,7 @@ def __init__( super(RegionConfigurationRequest, self).__init__(**kwargs) self.schedule_availability_request = kwargs.get('schedule_availability_request', None) self.transport_availability_request = kwargs.get('transport_availability_request', None) + self.datacenter_address_request = kwargs.get('datacenter_address_request', None) class RegionConfigurationResponse(msrest.serialization.Model): @@ -2551,20 +3696,25 @@ class RegionConfigurationResponse(msrest.serialization.Model): :ivar schedule_availability_response: Schedule availability for given sku in a region. 
:vartype schedule_availability_response: - ~azure.mgmt.databox.models.ScheduleAvailabilityResponse + ~data_box_management_client.models.ScheduleAvailabilityResponse :ivar transport_availability_response: Transport options available for given sku in a region. :vartype transport_availability_response: - ~azure.mgmt.databox.models.TransportAvailabilityResponse + ~data_box_management_client.models.TransportAvailabilityResponse + :ivar datacenter_address_response: Datacenter address for given sku in a region. + :vartype datacenter_address_response: + ~data_box_management_client.models.DatacenterAddressResponse """ _validation = { 'schedule_availability_response': {'readonly': True}, 'transport_availability_response': {'readonly': True}, + 'datacenter_address_response': {'readonly': True}, } _attribute_map = { 'schedule_availability_response': {'key': 'scheduleAvailabilityResponse', 'type': 'ScheduleAvailabilityResponse'}, 'transport_availability_response': {'key': 'transportAvailabilityResponse', 'type': 'TransportAvailabilityResponse'}, + 'datacenter_address_response': {'key': 'datacenterAddressResponse', 'type': 'DatacenterAddressResponse'}, } def __init__( @@ -2574,10 +3724,50 @@ def __init__( super(RegionConfigurationResponse, self).__init__(**kwargs) self.schedule_availability_response = None self.transport_availability_response = None + self.datacenter_address_response = None + + +class ResourceIdentity(msrest.serialization.Model): + """Msi identity details of the resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param type: Identity type. + :type type: str + :ivar principal_id: Service Principal Id backing the Msi. + :vartype principal_id: str + :ivar tenant_id: Home Tenant Id. + :vartype tenant_id: str + :param user_assigned_identities: User Assigned Identities. + :type user_assigned_identities: dict[str, + ~data_box_management_client.models.UserAssignedIdentity] + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'}, + } + + def __init__( + self, + **kwargs + ): + super(ResourceIdentity, self).__init__(**kwargs) + self.type = kwargs.get('type', "None") + self.principal_id = None + self.tenant_id = None + self.user_assigned_identities = kwargs.get('user_assigned_identities', None) class ScheduleAvailabilityResponse(msrest.serialization.Model): - """Schedule availability response for given sku in a region. + """Schedule availability for given sku in a region. Variables are only populated by the server, and will be ignored when sending a request. @@ -2610,13 +3800,14 @@ class ShareCredentialDetails(msrest.serialization.Model): :vartype share_name: str :ivar share_type: Type of the share. Possible values include: "UnknownType", "HCS", "BlockBlob", "PageBlob", "AzureFile", "ManagedDisk". - :vartype share_type: str or ~azure.mgmt.databox.models.ShareDestinationFormatType + :vartype share_type: str or ~data_box_management_client.models.ShareDestinationFormatType :ivar user_name: User name for the share. :vartype user_name: str :ivar password: Password for the share. :vartype password: str :ivar supported_access_protocols: Access protocols supported on the device. 
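The new ResourceIdentity model above defaults type to "None", and principal_id / tenant_id are read-only. A sketch of attaching a user-assigned identity, assuming the usual ARM identity type strings and a placeholder identity resource id:

from azext_databox.vendored_sdks.databox.models import ResourceIdentity, UserAssignedIdentity

# The dict key is the ARM resource id of the managed identity; UserAssignedIdentity
# itself carries only server-populated fields, so it is constructed empty.
identity = ResourceIdentity(
    type="UserAssigned",
    user_assigned_identities={
        "/subscriptions/<sub-id>/resourceGroups/<rg>/providers/Microsoft.ManagedIdentity/userAssignedIdentities/<identity-name>": UserAssignedIdentity(),
    },
)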
- :vartype supported_access_protocols: list[str or ~azure.mgmt.databox.models.AccessProtocol] + :vartype supported_access_protocols: list[str or + ~data_box_management_client.models.AccessProtocol] """ _validation = { @@ -2732,21 +3923,20 @@ class ShippingAddress(msrest.serialization.Model): :type state_or_province: str :param country: Required. Name of the Country. :type country: str - :param postal_code: Required. Postal code. + :param postal_code: Postal code. :type postal_code: str :param zip_extended_code: Extended Zip Code. :type zip_extended_code: str :param company_name: Name of the company. :type company_name: str :param address_type: Type of address. Possible values include: "None", "Residential", - "Commercial". - :type address_type: str or ~azure.mgmt.databox.models.AddressType + "Commercial". Default value: "None". + :type address_type: str or ~data_box_management_client.models.AddressType """ _validation = { 'street_address1': {'required': True}, 'country': {'required': True}, - 'postal_code': {'required': True}, } _attribute_map = { @@ -2773,10 +3963,10 @@ def __init__( self.city = kwargs.get('city', None) self.state_or_province = kwargs.get('state_or_province', None) self.country = kwargs['country'] - self.postal_code = kwargs['postal_code'] + self.postal_code = kwargs.get('postal_code', None) self.zip_extended_code = kwargs.get('zip_extended_code', None) self.company_name = kwargs.get('company_name', None) - self.address_type = kwargs.get('address_type', None) + self.address_type = kwargs.get('address_type', "None") class Sku(msrest.serialization.Model): @@ -2785,8 +3975,8 @@ class Sku(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param name: Required. The sku name. Possible values include: "DataBox", "DataBoxDisk", - "DataBoxHeavy". - :type name: str or ~azure.mgmt.databox.models.SkuName + "DataBoxHeavy", "DataBoxCustomerDisk". + :type name: str or ~data_box_management_client.models.SkuName :param display_name: The display name of the sku. :type display_name: str :param family: The sku family. @@ -2816,20 +4006,19 @@ def __init__( class SkuAvailabilityValidationRequest(ValidationInputRequest): """Request to validate sku availability. - Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :param device_type: Required. Device type to be used for the job. Possible values include: - "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type device_type: str or ~azure.mgmt.databox.models.SkuName - :ivar transfer_type: Required. Type of the transfer. Default value: "ImportToAzure". - :vartype transfer_type: str + "DataBox", "DataBoxDisk", "DataBoxHeavy", "DataBoxCustomerDisk". + :type device_type: str or ~data_box_management_client.models.SkuName + :param transfer_type: Required. Type of the transfer. 
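Because postal_code is no longer required and address_type now defaults to "None", a ShippingAddress can be built with just the two remaining required fields. A sketch with placeholder address values:

from azext_databox.vendored_sdks.databox.models import ShippingAddress

address = ShippingAddress(
    street_address1="1 Main Street",  # required
    country="US",                     # required
    city="Redmond",
    state_or_province="WA",
    postal_code="98052",              # optional after this change
    address_type="Commercial",        # defaults to "None" when omitted
)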
Possible values include: "ImportToAzure", + "ExportFromAzure". + :type transfer_type: str or ~data_box_management_client.models.TransferType :param country: Required. ISO country code. Country for hardware shipment. For codes check: https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements. :type country: str @@ -2841,7 +4030,7 @@ class SkuAvailabilityValidationRequest(ValidationInputRequest): _validation = { 'validation_type': {'required': True}, 'device_type': {'required': True}, - 'transfer_type': {'required': True, 'constant': True}, + 'transfer_type': {'required': True}, 'country': {'required': True}, 'location': {'required': True}, } @@ -2854,8 +4043,6 @@ class SkuAvailabilityValidationRequest(ValidationInputRequest): 'location': {'key': 'location', 'type': 'str'}, } - transfer_type = "ImportToAzure" - def __init__( self, **kwargs @@ -2863,6 +4050,7 @@ def __init__( super(SkuAvailabilityValidationRequest, self).__init__(**kwargs) self.validation_type = 'ValidateSkuAvailability' # type: str self.device_type = kwargs['device_type'] + self.transfer_type = kwargs['transfer_type'] self.country = kwargs['country'] self.location = kwargs['location'] @@ -2875,15 +4063,15 @@ class SkuAvailabilityValidationResponseProperties(ValidationInputResponse): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar status: Sku availability validation status. Possible values include: "Valid", "Invalid", "Skipped". - :vartype status: str or ~azure.mgmt.databox.models.ValidationStatus + :vartype status: str or ~data_box_management_client.models.ValidationStatus """ _validation = { @@ -2894,7 +4082,7 @@ class SkuAvailabilityValidationResponseProperties(ValidationInputResponse): _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'status': {'key': 'status', 'type': 'str'}, } @@ -2946,16 +4134,22 @@ class SkuCost(msrest.serialization.Model): :vartype meter_id: str :ivar meter_type: The type of the meter. :vartype meter_type: str + :ivar multiplier: Multiplier specifies the region specific value to be multiplied with 1$ guid. + Eg: Our new regions will be using 1$ shipping guid with appropriate multiplier specific to + region. 
+ :vartype multiplier: float """ _validation = { 'meter_id': {'readonly': True}, 'meter_type': {'readonly': True}, + 'multiplier': {'readonly': True}, } _attribute_map = { 'meter_id': {'key': 'meterId', 'type': 'str'}, 'meter_type': {'key': 'meterType', 'type': 'str'}, + 'multiplier': {'key': 'multiplier', 'type': 'float'}, } def __init__( @@ -2965,6 +4159,7 @@ def __init__( super(SkuCost, self).__init__(**kwargs) self.meter_id = None self.meter_type = None + self.multiplier = None class SkuInformation(msrest.serialization.Model): @@ -2973,21 +4168,21 @@ class SkuInformation(msrest.serialization.Model): Variables are only populated by the server, and will be ignored when sending a request. :ivar sku: The Sku. - :vartype sku: ~azure.mgmt.databox.models.Sku + :vartype sku: ~data_box_management_client.models.Sku :ivar enabled: The sku is enabled or not. :vartype enabled: bool - :ivar destination_to_service_location_map: The map of destination location to service location. - :vartype destination_to_service_location_map: - list[~azure.mgmt.databox.models.DestinationToServiceLocationMap] + :ivar data_location_to_service_location_map: The map of data location to service location. + :vartype data_location_to_service_location_map: + list[~data_box_management_client.models.DataLocationToServiceLocationMap] :ivar capacity: Capacity of the Sku. - :vartype capacity: ~azure.mgmt.databox.models.SkuCapacity + :vartype capacity: ~data_box_management_client.models.SkuCapacity :ivar costs: Cost of the Sku. - :vartype costs: list[~azure.mgmt.databox.models.SkuCost] + :vartype costs: list[~data_box_management_client.models.SkuCost] :ivar api_versions: Api versions that support this Sku. :vartype api_versions: list[str] :ivar disabled_reason: Reason why the Sku is disabled. Possible values include: "None", "Country", "Region", "Feature", "OfferType", "NoSubscriptionInfo". - :vartype disabled_reason: str or ~azure.mgmt.databox.models.SkuDisabledReason + :vartype disabled_reason: str or ~data_box_management_client.models.SkuDisabledReason :ivar disabled_reason_message: Message for why the Sku is disabled. :vartype disabled_reason_message: str :ivar required_feature: Required feature to access the sku. 
@@ -2997,7 +4192,7 @@ class SkuInformation(msrest.serialization.Model): _validation = { 'sku': {'readonly': True}, 'enabled': {'readonly': True}, - 'destination_to_service_location_map': {'readonly': True}, + 'data_location_to_service_location_map': {'readonly': True}, 'capacity': {'readonly': True}, 'costs': {'readonly': True}, 'api_versions': {'readonly': True}, @@ -3009,7 +4204,7 @@ class SkuInformation(msrest.serialization.Model): _attribute_map = { 'sku': {'key': 'sku', 'type': 'Sku'}, 'enabled': {'key': 'enabled', 'type': 'bool'}, - 'destination_to_service_location_map': {'key': 'properties.destinationToServiceLocationMap', 'type': '[DestinationToServiceLocationMap]'}, + 'data_location_to_service_location_map': {'key': 'properties.dataLocationToServiceLocationMap', 'type': '[DataLocationToServiceLocationMap]'}, 'capacity': {'key': 'properties.capacity', 'type': 'SkuCapacity'}, 'costs': {'key': 'properties.costs', 'type': '[SkuCost]'}, 'api_versions': {'key': 'properties.apiVersions', 'type': '[str]'}, @@ -3025,7 +4220,7 @@ def __init__( super(SkuInformation, self).__init__(**kwargs) self.sku = None self.enabled = None - self.destination_to_service_location_map = None + self.data_location_to_service_location_map = None self.capacity = None self.costs = None self.api_versions = None @@ -3034,16 +4229,65 @@ def __init__( self.required_feature = None +class StorageAccountDetails(DataAccountDetails): + """Details for the storage account. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Account Type of the data to be transferred.Constant filled + by server. Possible values include: "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param share_password: Password for all the shares to be created on the device. Should not be + passed for TransferType:ExportFromAzure jobs. If this is not passed, the service will generate + password itself. This will not be returned in Get Call. Password Requirements : Password must + be minimum of 12 and maximum of 64 characters. Password must have at least one uppercase + alphabet, one number and one special character. Password cannot have the following characters : + IilLoO0 Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. + :type share_password: str + :param storage_account_id: Required. Storage Account Resource Id. + :type storage_account_id: str + :param xt_passthrough_storage_account_id: Customer's Storage Account Resource Id of the XT- + passthrough job. + :type xt_passthrough_storage_account_id: str + :param xt_passthrough_storage_account_tenant_id: Customer's Storage Account's Tenant Id of the + XT-passthrough job. 
+ :type xt_passthrough_storage_account_tenant_id: str + """ + + _validation = { + 'data_account_type': {'required': True}, + 'storage_account_id': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'share_password': {'key': 'sharePassword', 'type': 'str'}, + 'storage_account_id': {'key': 'storageAccountId', 'type': 'str'}, + 'xt_passthrough_storage_account_id': {'key': 'xtPassthroughStorageAccountId', 'type': 'str'}, + 'xt_passthrough_storage_account_tenant_id': {'key': 'xtPassthroughStorageAccountTenantId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageAccountDetails, self).__init__(**kwargs) + self.data_account_type = 'StorageAccount' # type: str + self.storage_account_id = kwargs['storage_account_id'] + self.xt_passthrough_storage_account_id = kwargs.get('xt_passthrough_storage_account_id', None) + self.xt_passthrough_storage_account_tenant_id = kwargs.get('xt_passthrough_storage_account_tenant_id', None) + + class SubscriptionIsAllowedToCreateJobValidationRequest(ValidationInputRequest): """Request to validate subscription permission to create jobs. All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator """ _validation = { @@ -3070,15 +4314,15 @@ class SubscriptionIsAllowedToCreateJobValidationResponseProperties(ValidationInp All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error + :vartype error: ~data_box_management_client.models.CloudError :ivar status: Validation status of subscription permission to create job. Possible values include: "Valid", "Invalid", "Skipped". 
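For the new StorageAccountDetails model above, only storage_account_id must be supplied; the discriminator is filled in by the constructor. A sketch with a placeholder resource id (share_password is handled by the DataAccountDetails base class, which sits outside this hunk):

from azext_databox.vendored_sdks.databox.models import StorageAccountDetails

account = StorageAccountDetails(
    storage_account_id="/subscriptions/<sub-id>/resourceGroups/<rg>/providers/Microsoft.Storage/storageAccounts/<account-name>",
)
# data_account_type is set to 'StorageAccount' automatically; the xt_passthrough_* kwargs stay optional.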
- :vartype status: str or ~azure.mgmt.databox.models.ValidationStatus + :vartype status: str or ~data_box_management_client.models.ValidationStatus """ _validation = { @@ -3089,7 +4333,7 @@ class SubscriptionIsAllowedToCreateJobValidationResponseProperties(ValidationInp _attribute_map = { 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, + 'error': {'key': 'error', 'type': 'CloudError'}, 'status': {'key': 'status', 'type': 'str'}, } @@ -3102,6 +4346,207 @@ def __init__( self.status = None +class SystemData(msrest.serialization.Model): + """Provides details about resource creation and update time. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar created_by: A string identifier for the identity that created the resource. + :vartype created_by: str + :ivar created_by_type: The type of identity that created the resource: user, application, + managedIdentity. + :vartype created_by_type: str + :ivar created_at: The timestamp of resource creation (UTC). + :vartype created_at: ~datetime.datetime + :ivar last_modified_by: A string identifier for the identity that last modified the resource. + :vartype last_modified_by: str + :ivar last_modified_by_type: The type of identity that last modified the resource: user, + application, managedIdentity. + :vartype last_modified_by_type: str + :ivar last_modified_at: The timestamp of resource last modification (UTC). + :vartype last_modified_at: ~datetime.datetime + """ + + _validation = { + 'created_by': {'readonly': True}, + 'created_by_type': {'readonly': True}, + 'created_at': {'readonly': True}, + 'last_modified_by': {'readonly': True}, + 'last_modified_by_type': {'readonly': True}, + 'last_modified_at': {'readonly': True}, + } + + _attribute_map = { + 'created_by': {'key': 'createdBy', 'type': 'str'}, + 'created_by_type': {'key': 'createdByType', 'type': 'str'}, + 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, + 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, + 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, + 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, + } + + def __init__( + self, + **kwargs + ): + super(SystemData, self).__init__(**kwargs) + self.created_by = None + self.created_by_type = None + self.created_at = None + self.last_modified_by = None + self.last_modified_by_type = None + self.last_modified_at = None + + +class TransferAllDetails(msrest.serialization.Model): + """Details to transfer all data. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Type of the account of data. Possible values include: + "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param transfer_all_blobs: To indicate if all Azure blobs have to be transferred. + :type transfer_all_blobs: bool + :param transfer_all_files: To indicate if all Azure Files have to be transferred. 
+ :type transfer_all_files: bool + """ + + _validation = { + 'data_account_type': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'transfer_all_blobs': {'key': 'transferAllBlobs', 'type': 'bool'}, + 'transfer_all_files': {'key': 'transferAllFiles', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(TransferAllDetails, self).__init__(**kwargs) + self.data_account_type = kwargs['data_account_type'] + self.transfer_all_blobs = kwargs.get('transfer_all_blobs', None) + self.transfer_all_files = kwargs.get('transfer_all_files', None) + + +class TransferConfiguration(msrest.serialization.Model): + """Configuration for defining the transfer of data. + + All required parameters must be populated in order to send to Azure. + + :param transfer_configuration_type: Required. Type of the configuration for transfer. Possible + values include: "TransferAll", "TransferUsingFilter". + :type transfer_configuration_type: str or + ~data_box_management_client.models.TransferConfigurationType + :param transfer_filter_details: Map of filter type and the details to filter. This field is + required only if the TransferConfigurationType is given as TransferUsingFilter. + :type transfer_filter_details: + ~data_box_management_client.models.TransferConfigurationTransferFilterDetails + :param transfer_all_details: Map of filter type and the details to transfer all data. This + field is required only if the TransferConfigurationType is given as TransferAll. + :type transfer_all_details: + ~data_box_management_client.models.TransferConfigurationTransferAllDetails + """ + + _validation = { + 'transfer_configuration_type': {'required': True}, + } + + _attribute_map = { + 'transfer_configuration_type': {'key': 'transferConfigurationType', 'type': 'str'}, + 'transfer_filter_details': {'key': 'transferFilterDetails', 'type': 'TransferConfigurationTransferFilterDetails'}, + 'transfer_all_details': {'key': 'transferAllDetails', 'type': 'TransferConfigurationTransferAllDetails'}, + } + + def __init__( + self, + **kwargs + ): + super(TransferConfiguration, self).__init__(**kwargs) + self.transfer_configuration_type = kwargs['transfer_configuration_type'] + self.transfer_filter_details = kwargs.get('transfer_filter_details', None) + self.transfer_all_details = kwargs.get('transfer_all_details', None) + + +class TransferConfigurationTransferAllDetails(msrest.serialization.Model): + """Map of filter type and the details to transfer all data. This field is required only if the TransferConfigurationType is given as TransferAll. + + :param include: Details to transfer all data. + :type include: ~data_box_management_client.models.TransferAllDetails + """ + + _attribute_map = { + 'include': {'key': 'include', 'type': 'TransferAllDetails'}, + } + + def __init__( + self, + **kwargs + ): + super(TransferConfigurationTransferAllDetails, self).__init__(**kwargs) + self.include = kwargs.get('include', None) + + +class TransferConfigurationTransferFilterDetails(msrest.serialization.Model): + """Map of filter type and the details to filter. This field is required only if the TransferConfigurationType is given as TransferUsingFilter. + + :param include: Details of the filtering the transfer of data. 
+ :type include: ~data_box_management_client.models.TransferFilterDetails + """ + + _attribute_map = { + 'include': {'key': 'include', 'type': 'TransferFilterDetails'}, + } + + def __init__( + self, + **kwargs + ): + super(TransferConfigurationTransferFilterDetails, self).__init__(**kwargs) + self.include = kwargs.get('include', None) + + +class TransferFilterDetails(msrest.serialization.Model): + """Details of the filtering the transfer of data. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Type of the account of data. Possible values include: + "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param blob_filter_details: Filter details to transfer blobs. + :type blob_filter_details: ~data_box_management_client.models.BlobFilterDetails + :param azure_file_filter_details: Filter details to transfer Azure files. + :type azure_file_filter_details: ~data_box_management_client.models.AzureFileFilterDetails + :param filter_file_details: Details of the filter files to be used for data transfer. + :type filter_file_details: list[~data_box_management_client.models.FilterFileDetails] + """ + + _validation = { + 'data_account_type': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'blob_filter_details': {'key': 'blobFilterDetails', 'type': 'BlobFilterDetails'}, + 'azure_file_filter_details': {'key': 'azureFileFilterDetails', 'type': 'AzureFileFilterDetails'}, + 'filter_file_details': {'key': 'filterFileDetails', 'type': '[FilterFileDetails]'}, + } + + def __init__( + self, + **kwargs + ): + super(TransferFilterDetails, self).__init__(**kwargs) + self.data_account_type = kwargs['data_account_type'] + self.blob_filter_details = kwargs.get('blob_filter_details', None) + self.azure_file_filter_details = kwargs.get('azure_file_filter_details', None) + self.filter_file_details = kwargs.get('filter_file_details', None) + + class TransportAvailabilityDetails(msrest.serialization.Model): """Transport options availability details for given region. @@ -3109,7 +4554,7 @@ class TransportAvailabilityDetails(msrest.serialization.Model): :ivar shipment_type: Transport Shipment Type supported for given region. Possible values include: "CustomerManaged", "MicrosoftManaged". - :vartype shipment_type: str or ~azure.mgmt.databox.models.TransportShipmentTypes + :vartype shipment_type: str or ~data_box_management_client.models.TransportShipmentTypes """ _validation = { @@ -3132,8 +4577,8 @@ class TransportAvailabilityRequest(msrest.serialization.Model): """Request body to get the transport availability for given sku. :param sku_name: Type of the device. Possible values include: "DataBox", "DataBoxDisk", - "DataBoxHeavy". - :type sku_name: str or ~azure.mgmt.databox.models.SkuName + "DataBoxHeavy", "DataBoxCustomerDisk". + :type sku_name: str or ~data_box_management_client.models.SkuName """ _attribute_map = { @@ -3155,7 +4600,7 @@ class TransportAvailabilityResponse(msrest.serialization.Model): :ivar transport_availability_details: List of transport availability details for given region. :vartype transport_availability_details: - list[~azure.mgmt.databox.models.TransportAvailabilityDetails] + list[~data_box_management_client.models.TransportAvailabilityDetails] """ _validation = { @@ -3181,7 +4626,7 @@ class TransportPreferences(msrest.serialization.Model): :param preferred_shipment_type: Required. 
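The transfer models above nest one level deeper than the names suggest: TransferConfiguration wraps a TransferConfigurationTransferAllDetails or TransferConfigurationTransferFilterDetails, which in turn wraps the actual details under include. A sketch of both shapes, using the same vendored import path as above (BlobFilterDetails is defined elsewhere in this file; its fields are taken from the py3 variant later in this diff):

from azext_databox.vendored_sdks.databox.models import (
    BlobFilterDetails,
    TransferAllDetails,
    TransferConfiguration,
    TransferConfigurationTransferAllDetails,
    TransferConfigurationTransferFilterDetails,
    TransferFilterDetails,
)

# Transfer everything in blob storage.
transfer_all = TransferConfiguration(
    transfer_configuration_type="TransferAll",
    transfer_all_details=TransferConfigurationTransferAllDetails(
        include=TransferAllDetails(
            data_account_type="StorageAccount",
            transfer_all_blobs=True,
            transfer_all_files=False,
        )
    ),
)

# Transfer only selected blob containers.
transfer_filtered = TransferConfiguration(
    transfer_configuration_type="TransferUsingFilter",
    transfer_filter_details=TransferConfigurationTransferFilterDetails(
        include=TransferFilterDetails(
            data_account_type="StorageAccount",
            blob_filter_details=BlobFilterDetails(container_list=["container1"]),
        )
    ),
)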
Indicates Shipment Logistics type that the customer preferred. Possible values include: "CustomerManaged", "MicrosoftManaged". - :type preferred_shipment_type: str or ~azure.mgmt.databox.models.TransportShipmentTypes + :type preferred_shipment_type: str or ~data_box_management_client.models.TransportShipmentTypes """ _validation = { @@ -3208,7 +4653,7 @@ class UnencryptedCredentials(msrest.serialization.Model): :ivar job_name: Name of the job. :vartype job_name: str :ivar job_secrets: Secrets related to this job. - :vartype job_secrets: ~azure.mgmt.databox.models.JobSecrets + :vartype job_secrets: ~data_box_management_client.models.JobSecrets """ _validation = { @@ -3234,7 +4679,7 @@ class UnencryptedCredentialsList(msrest.serialization.Model): """List of unencrypted credentials for accessing device. :param value: List of unencrypted credentials. - :type value: list[~azure.mgmt.databox.models.UnencryptedCredentials] + :type value: list[~data_box_management_client.models.UnencryptedCredentials] :param next_link: Link for the next set of unencrypted credentials. :type next_link: str """ @@ -3257,14 +4702,21 @@ class UpdateJobDetails(msrest.serialization.Model): """Job details for update. :param contact_details: Contact details for notification and shipping. - :type contact_details: ~azure.mgmt.databox.models.ContactDetails + :type contact_details: ~data_box_management_client.models.ContactDetails :param shipping_address: Shipping address of the customer. - :type shipping_address: ~azure.mgmt.databox.models.ShippingAddress + :type shipping_address: ~data_box_management_client.models.ShippingAddress + :param key_encryption_key: Key encryption key for the job. + :type key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param return_package_details: Return package details of job. This is applicable only for + customer disk sku. + :type return_package_details: ~data_box_management_client.models.PackageCarrierDetails """ _attribute_map = { 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'return_package_details': {'key': 'returnPackageDetails', 'type': 'PackageCarrierDetails'}, } def __init__( @@ -3274,6 +4726,57 @@ def __init__( super(UpdateJobDetails, self).__init__(**kwargs) self.contact_details = kwargs.get('contact_details', None) self.shipping_address = kwargs.get('shipping_address', None) + self.key_encryption_key = kwargs.get('key_encryption_key', None) + self.return_package_details = kwargs.get('return_package_details', None) + + +class UserAssignedIdentity(msrest.serialization.Model): + """Class defining User assigned identity details. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar principal_id: The principal id of user assigned identity. + :vartype principal_id: str + :ivar client_id: The client id of user assigned identity. + :vartype client_id: str + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'client_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(UserAssignedIdentity, self).__init__(**kwargs) + self.principal_id = None + self.client_id = None + + +class UserAssignedProperties(msrest.serialization.Model): + """User assigned identity properties. 
+ + :param resource_id: Arm resource id for user assigned identity to be used to fetch MSI token. + :type resource_id: str + """ + + _attribute_map = { + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(UserAssignedProperties, self).__init__(**kwargs) + self.resource_id = kwargs.get('resource_id', None) class ValidateAddress(ValidationInputRequest): @@ -3282,17 +4785,17 @@ class ValidateAddress(ValidationInputRequest): All required parameters must be populated in order to send to Azure. :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", + server. Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator :param shipping_address: Required. Shipping address of the customer. - :type shipping_address: ~azure.mgmt.databox.models.ShippingAddress + :type shipping_address: ~data_box_management_client.models.ShippingAddress :param device_type: Required. Device type to be used for the job. Possible values include: - "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type device_type: str or ~azure.mgmt.databox.models.SkuName + "DataBox", "DataBoxDisk", "DataBoxHeavy", "DataBoxCustomerDisk". + :type device_type: str or ~data_box_management_client.models.SkuName :param transport_preferences: Preferences related to the shipment logistics of the sku. - :type transport_preferences: ~azure.mgmt.databox.models.TransportPreferences + :type transport_preferences: ~data_box_management_client.models.TransportPreferences """ _validation = { @@ -3326,10 +4829,11 @@ class ValidationResponse(msrest.serialization.Model): :ivar status: Overall validation status. Possible values include: "AllValidToProceed", "InputsRevisitRequired", "CertainInputValidationsSkipped". - :vartype status: str or ~azure.mgmt.databox.models.OverallValidationStatus + :vartype status: str or ~data_box_management_client.models.OverallValidationStatus :ivar individual_response_details: List of response details contain validationType and its response as key and value respectively. - :vartype individual_response_details: list[~azure.mgmt.databox.models.ValidationInputResponse] + :vartype individual_response_details: + list[~data_box_management_client.models.ValidationInputResponse] """ _validation = { diff --git a/src/databox/azext_databox/vendored_sdks/databox/models/_models_py3.py b/src/databox/azext_databox/vendored_sdks/databox/models/_models_py3.py new file mode 100644 index 00000000000..c36a0393a71 --- /dev/null +++ b/src/databox/azext_databox/vendored_sdks/databox/models/_models_py3.py @@ -0,0 +1,5141 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +import datetime +from typing import Dict, List, Optional, Union + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + +from ._data_box_management_client_enums import * + + +class AccountCredentialDetails(msrest.serialization.Model): + """Credential details of the account. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar account_name: Name of the account. + :vartype account_name: str + :ivar data_account_type: Type of the account. Possible values include: "StorageAccount", + "ManagedDisk". + :vartype data_account_type: str or ~data_box_management_client.models.DataAccountType + :ivar account_connection_string: Connection string of the account endpoint to use the account + as a storage endpoint on the device. + :vartype account_connection_string: str + :ivar share_credential_details: Per share level unencrypted access credentials. + :vartype share_credential_details: + list[~data_box_management_client.models.ShareCredentialDetails] + """ + + _validation = { + 'account_name': {'readonly': True}, + 'data_account_type': {'readonly': True}, + 'account_connection_string': {'readonly': True}, + 'share_credential_details': {'readonly': True}, + } + + _attribute_map = { + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'account_connection_string': {'key': 'accountConnectionString', 'type': 'str'}, + 'share_credential_details': {'key': 'shareCredentialDetails', 'type': '[ShareCredentialDetails]'}, + } + + def __init__( + self, + **kwargs + ): + super(AccountCredentialDetails, self).__init__(**kwargs) + self.account_name = None + self.data_account_type = None + self.account_connection_string = None + self.share_credential_details = None + + +class AdditionalErrorInfo(msrest.serialization.Model): + """Additional error info. + + :param type: Additional error type. + :type type: str + :param info: Additional error info. + :type info: object + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'info': {'key': 'info', 'type': 'object'}, + } + + def __init__( + self, + *, + type: Optional[str] = None, + info: Optional[object] = None, + **kwargs + ): + super(AdditionalErrorInfo, self).__init__(**kwargs) + self.type = type + self.info = info + + +class AddressValidationOutput(msrest.serialization.Model): + """Output of the address validation api. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param validation_type: Identifies the type of validation response.Constant filled by server. + Possible values include: "ValidateAddress", "ValidateSubscriptionIsAllowedToCreateJob", + "ValidatePreferences", "ValidateCreateOrderLimit", "ValidateSkuAvailability", + "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + :ivar error: Error code and message of validation response. + :vartype error: ~data_box_management_client.models.CloudError + :ivar validation_status: The address validation status. Possible values include: "Valid", + "Invalid", "Ambiguous". + :vartype validation_status: str or ~data_box_management_client.models.AddressValidationStatus + :ivar alternate_addresses: List of alternate addresses. 
+ :vartype alternate_addresses: list[~data_box_management_client.models.ShippingAddress] + """ + + _validation = { + 'error': {'readonly': True}, + 'validation_status': {'readonly': True}, + 'alternate_addresses': {'readonly': True}, + } + + _attribute_map = { + 'validation_type': {'key': 'properties.validationType', 'type': 'str'}, + 'error': {'key': 'properties.error', 'type': 'CloudError'}, + 'validation_status': {'key': 'properties.validationStatus', 'type': 'str'}, + 'alternate_addresses': {'key': 'properties.alternateAddresses', 'type': '[ShippingAddress]'}, + } + + def __init__( + self, + **kwargs + ): + super(AddressValidationOutput, self).__init__(**kwargs) + self.validation_type = None # type: Optional[str] + self.error = None + self.validation_status = None + self.alternate_addresses = None + + +class ValidationInputResponse(msrest.serialization.Model): + """Minimum properties that should be present in each individual validation response. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AddressValidationProperties, CreateOrderLimitForSubscriptionValidationResponseProperties, DataTransferDetailsValidationResponseProperties, PreferencesValidationResponseProperties, SkuAvailabilityValidationResponseProperties, SubscriptionIsAllowedToCreateJobValidationResponseProperties. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param validation_type: Required. Identifies the type of validation response.Constant filled by + server. Possible values include: "ValidateAddress", + "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + :ivar error: Error code and message of validation response. + :vartype error: ~data_box_management_client.models.CloudError + """ + + _validation = { + 'validation_type': {'required': True}, + 'error': {'readonly': True}, + } + + _attribute_map = { + 'validation_type': {'key': 'validationType', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'CloudError'}, + } + + _subtype_map = { + 'validation_type': {'ValidateAddress': 'AddressValidationProperties', 'ValidateCreateOrderLimit': 'CreateOrderLimitForSubscriptionValidationResponseProperties', 'ValidateDataTransferDetails': 'DataTransferDetailsValidationResponseProperties', 'ValidatePreferences': 'PreferencesValidationResponseProperties', 'ValidateSkuAvailability': 'SkuAvailabilityValidationResponseProperties', 'ValidateSubscriptionIsAllowedToCreateJob': 'SubscriptionIsAllowedToCreateJobValidationResponseProperties'} + } + + def __init__( + self, + **kwargs + ): + super(ValidationInputResponse, self).__init__(**kwargs) + self.validation_type = None # type: Optional[str] + self.error = None + + +class AddressValidationProperties(ValidationInputResponse): + """The address validation output. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param validation_type: Required. Identifies the type of validation response.Constant filled by + server. 
Possible values include: "ValidateAddress", + "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + :ivar error: Error code and message of validation response. + :vartype error: ~data_box_management_client.models.CloudError + :ivar validation_status: The address validation status. Possible values include: "Valid", + "Invalid", "Ambiguous". + :vartype validation_status: str or ~data_box_management_client.models.AddressValidationStatus + :ivar alternate_addresses: List of alternate addresses. + :vartype alternate_addresses: list[~data_box_management_client.models.ShippingAddress] + """ + + _validation = { + 'validation_type': {'required': True}, + 'error': {'readonly': True}, + 'validation_status': {'readonly': True}, + 'alternate_addresses': {'readonly': True}, + } + + _attribute_map = { + 'validation_type': {'key': 'validationType', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'CloudError'}, + 'validation_status': {'key': 'validationStatus', 'type': 'str'}, + 'alternate_addresses': {'key': 'alternateAddresses', 'type': '[ShippingAddress]'}, + } + + def __init__( + self, + **kwargs + ): + super(AddressValidationProperties, self).__init__(**kwargs) + self.validation_type = 'ValidateAddress' # type: str + self.validation_status = None + self.alternate_addresses = None + + +class ApiError(msrest.serialization.Model): + """ApiError. + + All required parameters must be populated in order to send to Azure. + + :param error: Required. + :type error: ~data_box_management_client.models.ErrorDetail + """ + + _validation = { + 'error': {'required': True}, + } + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorDetail'}, + } + + def __init__( + self, + *, + error: "ErrorDetail", + **kwargs + ): + super(ApiError, self).__init__(**kwargs) + self.error = error + + +class ApplianceNetworkConfiguration(msrest.serialization.Model): + """The Network Adapter configuration of a DataBox. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: Name of the network. + :vartype name: str + :ivar mac_address: Mac Address. + :vartype mac_address: str + """ + + _validation = { + 'name': {'readonly': True}, + 'mac_address': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'mac_address': {'key': 'macAddress', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ApplianceNetworkConfiguration, self).__init__(**kwargs) + self.name = None + self.mac_address = None + + +class ArmBaseObject(msrest.serialization.Model): + """Base class for all objects under resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: Name of the object. + :vartype name: str + :ivar id: Id of the object. + :vartype id: str + :ivar type: Type of the object. 
+ :vartype type: str + """ + + _validation = { + 'name': {'readonly': True}, + 'id': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ArmBaseObject, self).__init__(**kwargs) + self.name = None + self.id = None + self.type = None + + +class AvailableSkuRequest(msrest.serialization.Model): + """The filters for showing the available skus. + + All required parameters must be populated in order to send to Azure. + + :param transfer_type: Required. Type of the transfer. Possible values include: "ImportToAzure", + "ExportFromAzure". + :type transfer_type: str or ~data_box_management_client.models.TransferType + :param country: Required. ISO country code. Country for hardware shipment. For codes check: + https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements. + :type country: str + :param location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. + :type location: str + :param sku_names: Sku Names to filter for available skus. + :type sku_names: list[str or ~data_box_management_client.models.SkuName] + """ + + _validation = { + 'transfer_type': {'required': True}, + 'country': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'transfer_type': {'key': 'transferType', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'sku_names': {'key': 'skuNames', 'type': '[str]'}, + } + + def __init__( + self, + *, + transfer_type: Union[str, "TransferType"], + country: str, + location: str, + sku_names: Optional[List[Union[str, "SkuName"]]] = None, + **kwargs + ): + super(AvailableSkuRequest, self).__init__(**kwargs) + self.transfer_type = transfer_type + self.country = country + self.location = location + self.sku_names = sku_names + + +class AvailableSkusResult(msrest.serialization.Model): + """The available skus operation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of available skus. + :vartype value: list[~data_box_management_client.models.SkuInformation] + :param next_link: Link for the next set of skus. + :type next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[SkuInformation]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + **kwargs + ): + super(AvailableSkusResult, self).__init__(**kwargs) + self.value = None + self.next_link = next_link + + +class AzureFileFilterDetails(msrest.serialization.Model): + """Filter details to transfer Azure files. + + :param file_prefix_list: Prefix list of the Azure files to be transferred. + :type file_prefix_list: list[str] + :param file_path_list: List of full path of the files to be transferred. + :type file_path_list: list[str] + :param file_share_list: List of file shares to be transferred. 
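Unlike the kwargs-based file above, the constructors in _models_py3.py take explicit keyword arguments, as the AvailableSkuRequest definition just added shows. A sketch with placeholder values:

from azext_databox.vendored_sdks.databox.models import AvailableSkuRequest

request = AvailableSkuRequest(
    transfer_type="ImportToAzure",
    country="US",
    location="westus",
    sku_names=["DataBox", "DataBoxDisk"],  # optional filter
)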
+ :type file_share_list: list[str] + """ + + _attribute_map = { + 'file_prefix_list': {'key': 'filePrefixList', 'type': '[str]'}, + 'file_path_list': {'key': 'filePathList', 'type': '[str]'}, + 'file_share_list': {'key': 'fileShareList', 'type': '[str]'}, + } + + def __init__( + self, + *, + file_prefix_list: Optional[List[str]] = None, + file_path_list: Optional[List[str]] = None, + file_share_list: Optional[List[str]] = None, + **kwargs + ): + super(AzureFileFilterDetails, self).__init__(**kwargs) + self.file_prefix_list = file_prefix_list + self.file_path_list = file_path_list + self.file_share_list = file_share_list + + +class BlobFilterDetails(msrest.serialization.Model): + """Filter details to transfer Azure Blobs. + + :param blob_prefix_list: Prefix list of the Azure blobs to be transferred. + :type blob_prefix_list: list[str] + :param blob_path_list: List of full path of the blobs to be transferred. + :type blob_path_list: list[str] + :param container_list: List of blob containers to be transferred. + :type container_list: list[str] + """ + + _attribute_map = { + 'blob_prefix_list': {'key': 'blobPrefixList', 'type': '[str]'}, + 'blob_path_list': {'key': 'blobPathList', 'type': '[str]'}, + 'container_list': {'key': 'containerList', 'type': '[str]'}, + } + + def __init__( + self, + *, + blob_prefix_list: Optional[List[str]] = None, + blob_path_list: Optional[List[str]] = None, + container_list: Optional[List[str]] = None, + **kwargs + ): + super(BlobFilterDetails, self).__init__(**kwargs) + self.blob_prefix_list = blob_prefix_list + self.blob_path_list = blob_path_list + self.container_list = container_list + + +class CancellationReason(msrest.serialization.Model): + """Reason for cancellation. + + All required parameters must be populated in order to send to Azure. + + :param reason: Required. Reason for cancellation. + :type reason: str + """ + + _validation = { + 'reason': {'required': True}, + } + + _attribute_map = { + 'reason': {'key': 'reason', 'type': 'str'}, + } + + def __init__( + self, + *, + reason: str, + **kwargs + ): + super(CancellationReason, self).__init__(**kwargs) + self.reason = reason + + +class CloudError(msrest.serialization.Model): + """Cloud error. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param code: Cloud error code. + :type code: str + :param message: Cloud error message. + :type message: str + :param target: Cloud error target. + :type target: str + :ivar details: Cloud error details. + :vartype details: list[~data_box_management_client.models.CloudError] + :ivar additional_info: Cloud error additional info. + :vartype additional_info: list[~data_box_management_client.models.AdditionalErrorInfo] + """ + + _validation = { + 'details': {'readonly': True}, + 'additional_info': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[CloudError]'}, + 'additional_info': {'key': 'additionalInfo', 'type': '[AdditionalErrorInfo]'}, + } + + def __init__( + self, + *, + code: Optional[str] = None, + message: Optional[str] = None, + target: Optional[str] = None, + **kwargs + ): + super(CloudError, self).__init__(**kwargs) + self.code = code + self.message = message + self.target = target + self.details = None + self.additional_info = None + + +class ContactDetails(msrest.serialization.Model): + """Contact Details. 
+ + All required parameters must be populated in order to send to Azure. + + :param contact_name: Required. Contact name of the person. + :type contact_name: str + :param phone: Required. Phone number of the contact person. + :type phone: str + :param phone_extension: Phone extension number of the contact person. + :type phone_extension: str + :param mobile: Mobile number of the contact person. + :type mobile: str + :param email_list: Required. List of Email-ids to be notified about job progress. + :type email_list: list[str] + :param notification_preference: Notification preference for a job stage. + :type notification_preference: list[~data_box_management_client.models.NotificationPreference] + """ + + _validation = { + 'contact_name': {'required': True}, + 'phone': {'required': True}, + 'email_list': {'required': True}, + } + + _attribute_map = { + 'contact_name': {'key': 'contactName', 'type': 'str'}, + 'phone': {'key': 'phone', 'type': 'str'}, + 'phone_extension': {'key': 'phoneExtension', 'type': 'str'}, + 'mobile': {'key': 'mobile', 'type': 'str'}, + 'email_list': {'key': 'emailList', 'type': '[str]'}, + 'notification_preference': {'key': 'notificationPreference', 'type': '[NotificationPreference]'}, + } + + def __init__( + self, + *, + contact_name: str, + phone: str, + email_list: List[str], + phone_extension: Optional[str] = None, + mobile: Optional[str] = None, + notification_preference: Optional[List["NotificationPreference"]] = None, + **kwargs + ): + super(ContactDetails, self).__init__(**kwargs) + self.contact_name = contact_name + self.phone = phone + self.phone_extension = phone_extension + self.mobile = mobile + self.email_list = email_list + self.notification_preference = notification_preference + + +class CopyLogDetails(msrest.serialization.Model): + """Details for log generated during copy. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: DataBoxAccountCopyLogDetails, DataBoxCustomerDiskCopyLogDetails, DataBoxDiskCopyLogDetails, DataBoxHeavyAccountCopyLogDetails. + + All required parameters must be populated in order to send to Azure. + + :param copy_log_details_type: Required. Indicates the type of job details.Constant filled by + server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type copy_log_details_type: str or ~data_box_management_client.models.ClassDiscriminator + """ + + _validation = { + 'copy_log_details_type': {'required': True}, + } + + _attribute_map = { + 'copy_log_details_type': {'key': 'copyLogDetailsType', 'type': 'str'}, + } + + _subtype_map = { + 'copy_log_details_type': {'DataBox': 'DataBoxAccountCopyLogDetails', 'DataBoxCustomerDisk': 'DataBoxCustomerDiskCopyLogDetails', 'DataBoxDisk': 'DataBoxDiskCopyLogDetails', 'DataBoxHeavy': 'DataBoxHeavyAccountCopyLogDetails'} + } + + def __init__( + self, + **kwargs + ): + super(CopyLogDetails, self).__init__(**kwargs) + self.copy_log_details_type = None # type: Optional[str] + + +class CopyProgress(msrest.serialization.Model): + """Copy progress. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar storage_account_name: Name of the storage account. This will be empty for data account + types other than storage account. + :vartype storage_account_name: str + :ivar transfer_type: Transfer type of data. Possible values include: "ImportToAzure", + "ExportFromAzure". 
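ContactDetails keeps contact_name, phone and email_list required; the rest are optional keyword arguments. A sketch with placeholder contact data:

from azext_databox.vendored_sdks.databox.models import ContactDetails

contact = ContactDetails(
    contact_name="Jane Doe",
    phone="4255550100",
    email_list=["jane@contoso.com"],
    mobile="4255550101",     # optional
    phone_extension="1234",  # optional
)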
+ :vartype transfer_type: str or ~data_box_management_client.models.TransferType + :ivar data_account_type: Data Account Type. Possible values include: "StorageAccount", + "ManagedDisk". + :vartype data_account_type: str or ~data_box_management_client.models.DataAccountType + :ivar account_id: Id of the account where the data needs to be uploaded. + :vartype account_id: str + :ivar bytes_processed: To indicate bytes transferred. + :vartype bytes_processed: long + :ivar total_bytes_to_process: Total amount of data to be processed by the job. + :vartype total_bytes_to_process: long + :ivar files_processed: Number of files processed. + :vartype files_processed: long + :ivar total_files_to_process: Total files to process. + :vartype total_files_to_process: long + :ivar invalid_files_processed: Number of files not adhering to azure naming conventions which + were processed by automatic renaming. + :vartype invalid_files_processed: long + :ivar invalid_file_bytes_uploaded: Total amount of data not adhering to azure naming + conventions which were processed by automatic renaming. + :vartype invalid_file_bytes_uploaded: long + :ivar renamed_container_count: Number of folders not adhering to azure naming conventions which + were processed by automatic renaming. + :vartype renamed_container_count: long + :ivar files_errored_out: Number of files which could not be copied. + :vartype files_errored_out: long + :ivar directories_errored_out: To indicate directories errored out in the job. + :vartype directories_errored_out: long + :ivar invalid_directories_processed: To indicate directories renamed. + :vartype invalid_directories_processed: long + :ivar is_enumeration_in_progress: To indicate if enumeration of data is in progress. + Until this is true, the TotalBytesToProcess may not be valid. 
+ :vartype is_enumeration_in_progress: bool + """ + + _validation = { + 'storage_account_name': {'readonly': True}, + 'transfer_type': {'readonly': True}, + 'data_account_type': {'readonly': True}, + 'account_id': {'readonly': True}, + 'bytes_processed': {'readonly': True}, + 'total_bytes_to_process': {'readonly': True}, + 'files_processed': {'readonly': True}, + 'total_files_to_process': {'readonly': True}, + 'invalid_files_processed': {'readonly': True}, + 'invalid_file_bytes_uploaded': {'readonly': True}, + 'renamed_container_count': {'readonly': True}, + 'files_errored_out': {'readonly': True}, + 'directories_errored_out': {'readonly': True}, + 'invalid_directories_processed': {'readonly': True}, + 'is_enumeration_in_progress': {'readonly': True}, + } + + _attribute_map = { + 'storage_account_name': {'key': 'storageAccountName', 'type': 'str'}, + 'transfer_type': {'key': 'transferType', 'type': 'str'}, + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'account_id': {'key': 'accountId', 'type': 'str'}, + 'bytes_processed': {'key': 'bytesProcessed', 'type': 'long'}, + 'total_bytes_to_process': {'key': 'totalBytesToProcess', 'type': 'long'}, + 'files_processed': {'key': 'filesProcessed', 'type': 'long'}, + 'total_files_to_process': {'key': 'totalFilesToProcess', 'type': 'long'}, + 'invalid_files_processed': {'key': 'invalidFilesProcessed', 'type': 'long'}, + 'invalid_file_bytes_uploaded': {'key': 'invalidFileBytesUploaded', 'type': 'long'}, + 'renamed_container_count': {'key': 'renamedContainerCount', 'type': 'long'}, + 'files_errored_out': {'key': 'filesErroredOut', 'type': 'long'}, + 'directories_errored_out': {'key': 'directoriesErroredOut', 'type': 'long'}, + 'invalid_directories_processed': {'key': 'invalidDirectoriesProcessed', 'type': 'long'}, + 'is_enumeration_in_progress': {'key': 'isEnumerationInProgress', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(CopyProgress, self).__init__(**kwargs) + self.storage_account_name = None + self.transfer_type = None + self.data_account_type = None + self.account_id = None + self.bytes_processed = None + self.total_bytes_to_process = None + self.files_processed = None + self.total_files_to_process = None + self.invalid_files_processed = None + self.invalid_file_bytes_uploaded = None + self.renamed_container_count = None + self.files_errored_out = None + self.directories_errored_out = None + self.invalid_directories_processed = None + self.is_enumeration_in_progress = None + + +class ValidationRequest(msrest.serialization.Model): + """Minimum request requirement of any validation category. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: CreateJobValidations. + + All required parameters must be populated in order to send to Azure. + + :param validation_category: Required. Identify the nature of validation.Constant filled by + server. + :type validation_category: str + :param individual_request_details: Required. List of request details contain validationType and + its request as key and value respectively. 
+ :type individual_request_details: + list[~data_box_management_client.models.ValidationInputRequest] + """ + + _validation = { + 'validation_category': {'required': True}, + 'individual_request_details': {'required': True}, + } + + _attribute_map = { + 'validation_category': {'key': 'validationCategory', 'type': 'str'}, + 'individual_request_details': {'key': 'individualRequestDetails', 'type': '[ValidationInputRequest]'}, + } + + _subtype_map = { + 'validation_category': {'JobCreationValidation': 'CreateJobValidations'} + } + + def __init__( + self, + *, + individual_request_details: List["ValidationInputRequest"], + **kwargs + ): + super(ValidationRequest, self).__init__(**kwargs) + self.validation_category = None # type: Optional[str] + self.individual_request_details = individual_request_details + + +class CreateJobValidations(ValidationRequest): + """It does all pre-job creation validations. + + All required parameters must be populated in order to send to Azure. + + :param validation_category: Required. Identify the nature of validation.Constant filled by + server. + :type validation_category: str + :param individual_request_details: Required. List of request details contain validationType and + its request as key and value respectively. + :type individual_request_details: + list[~data_box_management_client.models.ValidationInputRequest] + """ + + _validation = { + 'validation_category': {'required': True}, + 'individual_request_details': {'required': True}, + } + + _attribute_map = { + 'validation_category': {'key': 'validationCategory', 'type': 'str'}, + 'individual_request_details': {'key': 'individualRequestDetails', 'type': '[ValidationInputRequest]'}, + } + + def __init__( + self, + *, + individual_request_details: List["ValidationInputRequest"], + **kwargs + ): + super(CreateJobValidations, self).__init__(individual_request_details=individual_request_details, **kwargs) + self.validation_category = 'JobCreationValidation' # type: str + + +class ValidationInputRequest(msrest.serialization.Model): + """Minimum fields that must be present in any type of validation request. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ValidateAddress, CreateOrderLimitForSubscriptionValidationRequest, DataTransferDetailsValidationRequest, PreferencesValidationRequest, SkuAvailabilityValidationRequest, SubscriptionIsAllowedToCreateJobValidationRequest. + + All required parameters must be populated in order to send to Azure. + + :param validation_type: Required. Identifies the type of validation request.Constant filled by + server. Possible values include: "ValidateAddress", + "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", + "ValidateSkuAvailability", "ValidateDataTransferDetails". 
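Editor's note: the generated validation models above all follow msrest's polymorphic-discriminator pattern. The base class declares a _subtype_map keyed on a constant-filled field (validation_category for ValidationRequest, validation_type for ValidationInputRequest), and deserializing through the base class returns the concrete subclass named by the wire value. Below is a minimal, self-contained sketch of that mechanism; the toy class names are illustrative only and are not part of this extension, but the moving parts (_attribute_map, _subtype_map, constant discriminator set in __init__) mirror the generated code.

# Sketch of the discriminator pattern used by the generated models.
# The class names here are made up for illustration.
from msrest.serialization import Model


class ValidationBase(Model):
    _validation = {'validation_type': {'required': True}}
    _attribute_map = {'validation_type': {'key': 'validationType', 'type': 'str'}}
    # Wire value of the discriminator -> name of the concrete model class.
    _subtype_map = {'validation_type': {'ValidateAddress': 'AddressValidation'}}

    def __init__(self, **kwargs):
        super(ValidationBase, self).__init__(**kwargs)
        self.validation_type = None  # constant, filled by the concrete subclass


class AddressValidation(ValidationBase):
    _validation = {'validation_type': {'required': True}}
    _attribute_map = {
        'validation_type': {'key': 'validationType', 'type': 'str'},
        'city': {'key': 'city', 'type': 'str'},
    }

    def __init__(self, *, city=None, **kwargs):
        super(AddressValidation, self).__init__(**kwargs)
        self.validation_type = 'ValidateAddress'
        self.city = city


# Deserializing through the base class yields the concrete subtype.
request = ValidationBase.deserialize({'validationType': 'ValidateAddress', 'city': 'San Francisco'})
assert isinstance(request, AddressValidation)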
+ :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + """ + + _validation = { + 'validation_type': {'required': True}, + } + + _attribute_map = { + 'validation_type': {'key': 'validationType', 'type': 'str'}, + } + + _subtype_map = { + 'validation_type': {'ValidateAddress': 'ValidateAddress', 'ValidateCreateOrderLimit': 'CreateOrderLimitForSubscriptionValidationRequest', 'ValidateDataTransferDetails': 'DataTransferDetailsValidationRequest', 'ValidatePreferences': 'PreferencesValidationRequest', 'ValidateSkuAvailability': 'SkuAvailabilityValidationRequest', 'ValidateSubscriptionIsAllowedToCreateJob': 'SubscriptionIsAllowedToCreateJobValidationRequest'} + } + + def __init__( + self, + **kwargs + ): + super(ValidationInputRequest, self).__init__(**kwargs) + self.validation_type = None # type: Optional[str] + + +class CreateOrderLimitForSubscriptionValidationRequest(ValidationInputRequest): + """Request to validate create order limit for current subscription. + + All required parameters must be populated in order to send to Azure. + + :param validation_type: Required. Identifies the type of validation request.Constant filled by + server. Possible values include: "ValidateAddress", + "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + :param device_type: Required. Device type to be used for the job. Possible values include: + "DataBox", "DataBoxDisk", "DataBoxHeavy", "DataBoxCustomerDisk". + :type device_type: str or ~data_box_management_client.models.SkuName + """ + + _validation = { + 'validation_type': {'required': True}, + 'device_type': {'required': True}, + } + + _attribute_map = { + 'validation_type': {'key': 'validationType', 'type': 'str'}, + 'device_type': {'key': 'deviceType', 'type': 'str'}, + } + + def __init__( + self, + *, + device_type: Union[str, "SkuName"], + **kwargs + ): + super(CreateOrderLimitForSubscriptionValidationRequest, self).__init__(**kwargs) + self.validation_type = 'ValidateCreateOrderLimit' # type: str + self.device_type = device_type + + +class CreateOrderLimitForSubscriptionValidationResponseProperties(ValidationInputResponse): + """Properties of create order limit for subscription validation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param validation_type: Required. Identifies the type of validation response.Constant filled by + server. Possible values include: "ValidateAddress", + "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + :ivar error: Error code and message of validation response. + :vartype error: ~data_box_management_client.models.CloudError + :ivar status: Create order limit validation status. Possible values include: "Valid", + "Invalid", "Skipped". 
+ :vartype status: str or ~data_box_management_client.models.ValidationStatus + """ + + _validation = { + 'validation_type': {'required': True}, + 'error': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'validation_type': {'key': 'validationType', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'CloudError'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(CreateOrderLimitForSubscriptionValidationResponseProperties, self).__init__(**kwargs) + self.validation_type = 'ValidateCreateOrderLimit' # type: str + self.status = None + + +class JobSecrets(msrest.serialization.Model): + """The base class for the secrets. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: DataboxJobSecrets, CustomerDiskJobSecrets, DataBoxDiskJobSecrets, DataBoxHeavyJobSecrets. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant + filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. + :vartype error: ~data_box_management_client.models.CloudError + """ + + _validation = { + 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, + } + + _attribute_map = { + 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, + 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, + } + + _subtype_map = { + 'job_secrets_type': {'DataBox': 'DataboxJobSecrets', 'DataBoxCustomerDisk': 'CustomerDiskJobSecrets', 'DataBoxDisk': 'DataBoxDiskJobSecrets', 'DataBoxHeavy': 'DataBoxHeavyJobSecrets'} + } + + def __init__( + self, + **kwargs + ): + super(JobSecrets, self).__init__(**kwargs) + self.job_secrets_type = None # type: Optional[str] + self.dc_access_security_code = None + self.error = None + + +class CustomerDiskJobSecrets(JobSecrets): + """The secrets related to customer disk job. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant + filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. + :vartype error: ~data_box_management_client.models.CloudError + :ivar disk_secrets: Contains the list of secrets object for that device. + :vartype disk_secrets: list[~data_box_management_client.models.DiskSecret] + :ivar carrier_account_number: Carrier Account Number of the customer. 
+ :vartype carrier_account_number: str + """ + + _validation = { + 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, + 'disk_secrets': {'readonly': True}, + 'carrier_account_number': {'readonly': True}, + } + + _attribute_map = { + 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, + 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, + 'disk_secrets': {'key': 'diskSecrets', 'type': '[DiskSecret]'}, + 'carrier_account_number': {'key': 'carrierAccountNumber', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(CustomerDiskJobSecrets, self).__init__(**kwargs) + self.job_secrets_type = 'DataBoxCustomerDisk' # type: str + self.disk_secrets = None + self.carrier_account_number = None + + +class DataAccountDetails(msrest.serialization.Model): + """Account details of the data to be transferred. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ManagedDiskDetails, StorageAccountDetails. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Account Type of the data to be transferred.Constant filled + by server. Possible values include: "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param share_password: Password for all the shares to be created on the device. Should not be + passed for TransferType:ExportFromAzure jobs. If this is not passed, the service will generate + password itself. This will not be returned in Get Call. Password Requirements : Password must + be minimum of 12 and maximum of 64 characters. Password must have at least one uppercase + alphabet, one number and one special character. Password cannot have the following characters : + IilLoO0 Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. + :type share_password: str + """ + + _validation = { + 'data_account_type': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'share_password': {'key': 'sharePassword', 'type': 'str'}, + } + + _subtype_map = { + 'data_account_type': {'ManagedDisk': 'ManagedDiskDetails', 'StorageAccount': 'StorageAccountDetails'} + } + + def __init__( + self, + *, + share_password: Optional[str] = None, + **kwargs + ): + super(DataAccountDetails, self).__init__(**kwargs) + self.data_account_type = None # type: Optional[str] + self.share_password = share_password + + +class DataBoxAccountCopyLogDetails(CopyLogDetails): + """Copy log details for a storage account of a DataBox job. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param copy_log_details_type: Required. Indicates the type of job details.Constant filled by + server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type copy_log_details_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar account_name: Account name. + :vartype account_name: str + :ivar copy_log_link: Link for copy logs. + :vartype copy_log_link: str + :ivar copy_verbose_log_link: Link for copy verbose logs. This will be set only when + LogCollectionLevel is set to Verbose. 
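Editor's note: the share_password requirements spelled out in the DataAccountDetails docstring (12 to 64 characters, at least one uppercase letter, one digit and one special character, none of IilLoO0, allowed specials @#-$%^!+=;:_()]+) are enforced by the service. The helper below is a hypothetical client-side pre-check that simply mirrors those stated rules; it is not part of the generated SDK.

import string

# Hypothetical pre-check mirroring the share_password rules quoted in the
# DataAccountDetails docstring; the Data Box service remains the authority.
_FORBIDDEN = set('IilLoO0')
_SPECIALS = set('@#-$%^!+=;:_()]+')
_ALLOWED = (set(string.ascii_letters) | set(string.digits) | _SPECIALS) - _FORBIDDEN


def looks_like_valid_share_password(password):
    if not 12 <= len(password) <= 64:
        return False
    if not set(password) <= _ALLOWED:
        return False
    return (any(c.isupper() for c in password)
            and any(c.isdigit() for c in password)
            and any(c in _SPECIALS for c in password))


print(looks_like_valid_share_password('SafePass#1234'))      # True
print(looks_like_valid_share_password('Invalid0Password#'))  # False: contains forbidden characters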
+ :vartype copy_verbose_log_link: str + """ + + _validation = { + 'copy_log_details_type': {'required': True}, + 'account_name': {'readonly': True}, + 'copy_log_link': {'readonly': True}, + 'copy_verbose_log_link': {'readonly': True}, + } + + _attribute_map = { + 'copy_log_details_type': {'key': 'copyLogDetailsType', 'type': 'str'}, + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'copy_log_link': {'key': 'copyLogLink', 'type': 'str'}, + 'copy_verbose_log_link': {'key': 'copyVerboseLogLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DataBoxAccountCopyLogDetails, self).__init__(**kwargs) + self.copy_log_details_type = 'DataBox' # type: str + self.account_name = None + self.copy_log_link = None + self.copy_verbose_log_link = None + + +class DataBoxCustomerDiskCopyLogDetails(CopyLogDetails): + """Copy Log Details for customer disk. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param copy_log_details_type: Required. Indicates the type of job details.Constant filled by + server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type copy_log_details_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar disk_serial_number: Disk Serial Number. + :vartype disk_serial_number: str + :ivar error_log_link: Link for copy error logs. + :vartype error_log_link: str + :ivar verbose_log_link: Link for copy verbose logs. + :vartype verbose_log_link: str + """ + + _validation = { + 'copy_log_details_type': {'required': True}, + 'disk_serial_number': {'readonly': True}, + 'error_log_link': {'readonly': True}, + 'verbose_log_link': {'readonly': True}, + } + + _attribute_map = { + 'copy_log_details_type': {'key': 'copyLogDetailsType', 'type': 'str'}, + 'disk_serial_number': {'key': 'diskSerialNumber', 'type': 'str'}, + 'error_log_link': {'key': 'errorLogLink', 'type': 'str'}, + 'verbose_log_link': {'key': 'verboseLogLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DataBoxCustomerDiskCopyLogDetails, self).__init__(**kwargs) + self.copy_log_details_type = 'DataBoxCustomerDisk' # type: str + self.disk_serial_number = None + self.error_log_link = None + self.verbose_log_link = None + + +class DataBoxCustomerDiskCopyProgress(CopyProgress): + """DataBox CustomerDisk Copy Progress. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar storage_account_name: Name of the storage account. This will be empty for data account + types other than storage account. + :vartype storage_account_name: str + :ivar transfer_type: Transfer type of data. Possible values include: "ImportToAzure", + "ExportFromAzure". + :vartype transfer_type: str or ~data_box_management_client.models.TransferType + :ivar data_account_type: Data Account Type. Possible values include: "StorageAccount", + "ManagedDisk". + :vartype data_account_type: str or ~data_box_management_client.models.DataAccountType + :ivar account_id: Id of the account where the data needs to be uploaded. + :vartype account_id: str + :ivar bytes_processed: To indicate bytes transferred. + :vartype bytes_processed: long + :ivar total_bytes_to_process: Total amount of data to be processed by the job. + :vartype total_bytes_to_process: long + :ivar files_processed: Number of files processed. + :vartype files_processed: long + :ivar total_files_to_process: Total files to process. 
+ :vartype total_files_to_process: long + :ivar invalid_files_processed: Number of files not adhering to azure naming conventions which + were processed by automatic renaming. + :vartype invalid_files_processed: long + :ivar invalid_file_bytes_uploaded: Total amount of data not adhering to azure naming + conventions which were processed by automatic renaming. + :vartype invalid_file_bytes_uploaded: long + :ivar renamed_container_count: Number of folders not adhering to azure naming conventions which + were processed by automatic renaming. + :vartype renamed_container_count: long + :ivar files_errored_out: Number of files which could not be copied. + :vartype files_errored_out: long + :ivar directories_errored_out: To indicate directories errored out in the job. + :vartype directories_errored_out: long + :ivar invalid_directories_processed: To indicate directories renamed. + :vartype invalid_directories_processed: long + :ivar is_enumeration_in_progress: To indicate if enumeration of data is in progress. + Until this is true, the TotalBytesToProcess may not be valid. + :vartype is_enumeration_in_progress: bool + :ivar disk_serial_number: Disk Serial Number. + :vartype disk_serial_number: str + :ivar copy_status: The Status of the copy. Possible values include: "NotStarted", "InProgress", + "Completed", "CompletedWithErrors", "Failed", "NotReturned", "HardwareError", + "DeviceFormatted", "DeviceMetadataModified", "StorageAccountNotAccessible", "UnsupportedData". + :vartype copy_status: str or ~data_box_management_client.models.CopyStatus + """ + + _validation = { + 'storage_account_name': {'readonly': True}, + 'transfer_type': {'readonly': True}, + 'data_account_type': {'readonly': True}, + 'account_id': {'readonly': True}, + 'bytes_processed': {'readonly': True}, + 'total_bytes_to_process': {'readonly': True}, + 'files_processed': {'readonly': True}, + 'total_files_to_process': {'readonly': True}, + 'invalid_files_processed': {'readonly': True}, + 'invalid_file_bytes_uploaded': {'readonly': True}, + 'renamed_container_count': {'readonly': True}, + 'files_errored_out': {'readonly': True}, + 'directories_errored_out': {'readonly': True}, + 'invalid_directories_processed': {'readonly': True}, + 'is_enumeration_in_progress': {'readonly': True}, + 'disk_serial_number': {'readonly': True}, + 'copy_status': {'readonly': True}, + } + + _attribute_map = { + 'storage_account_name': {'key': 'storageAccountName', 'type': 'str'}, + 'transfer_type': {'key': 'transferType', 'type': 'str'}, + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'account_id': {'key': 'accountId', 'type': 'str'}, + 'bytes_processed': {'key': 'bytesProcessed', 'type': 'long'}, + 'total_bytes_to_process': {'key': 'totalBytesToProcess', 'type': 'long'}, + 'files_processed': {'key': 'filesProcessed', 'type': 'long'}, + 'total_files_to_process': {'key': 'totalFilesToProcess', 'type': 'long'}, + 'invalid_files_processed': {'key': 'invalidFilesProcessed', 'type': 'long'}, + 'invalid_file_bytes_uploaded': {'key': 'invalidFileBytesUploaded', 'type': 'long'}, + 'renamed_container_count': {'key': 'renamedContainerCount', 'type': 'long'}, + 'files_errored_out': {'key': 'filesErroredOut', 'type': 'long'}, + 'directories_errored_out': {'key': 'directoriesErroredOut', 'type': 'long'}, + 'invalid_directories_processed': {'key': 'invalidDirectoriesProcessed', 'type': 'long'}, + 'is_enumeration_in_progress': {'key': 'isEnumerationInProgress', 'type': 'bool'}, + 'disk_serial_number': {'key': 'diskSerialNumber', 'type': 'str'}, + 
'copy_status': {'key': 'copyStatus', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DataBoxCustomerDiskCopyProgress, self).__init__(**kwargs) + self.disk_serial_number = None + self.copy_status = None + + +class JobDetails(msrest.serialization.Model): + """Job details. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: DataBoxJobDetails, DataBoxCustomerDiskJobDetails, DataBoxDiskJobDetails, DataBoxHeavyJobDetails. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar job_stages: List of stages that run in the job. + :vartype job_stages: list[~data_box_management_client.models.JobStages] + :param contact_details: Required. Contact details for notification and shipping. + :type contact_details: ~data_box_management_client.models.ContactDetails + :param shipping_address: Shipping address of the customer. + :type shipping_address: ~data_box_management_client.models.ShippingAddress + :ivar delivery_package: Delivery package shipping details. + :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails + :ivar return_package: Return package shipping details. + :vartype return_package: ~data_box_management_client.models.PackageShippingDetails + :param data_import_details: Details of the data to be imported into azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] + :param job_details_type: Required. Indicates the type of job details.Constant filled by server. + Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", "DataBoxCustomerDisk". + :type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator + :param preferences: Preferences for the order. + :type preferences: ~data_box_management_client.models.Preferences + :ivar copy_log_details: List of copy log details. + :vartype copy_log_details: list[~data_box_management_client.models.CopyLogDetails] + :ivar reverse_shipment_label_sas_key: Shared access key to download the return shipment label. + :vartype reverse_shipment_label_sas_key: str + :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. + :vartype chain_of_custody_sas_key: str + :param key_encryption_key: Details about which key encryption type is being used. + :type key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_tera_bytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. + :type expected_data_size_in_tera_bytes: int + :ivar actions: Available actions on the job. + :vartype actions: list[str or ~data_box_management_client.models.CustomerResolutionCode] + :ivar last_mitigation_action_on_job: Last mitigation action performed on the job. 
+ :vartype last_mitigation_action_on_job: + ~data_box_management_client.models.LastMitigationActionOnJob + """ + + _validation = { + 'job_stages': {'readonly': True}, + 'contact_details': {'required': True}, + 'delivery_package': {'readonly': True}, + 'return_package': {'readonly': True}, + 'job_details_type': {'required': True}, + 'copy_log_details': {'readonly': True}, + 'reverse_shipment_label_sas_key': {'readonly': True}, + 'chain_of_custody_sas_key': {'readonly': True}, + 'actions': {'readonly': True}, + 'last_mitigation_action_on_job': {'readonly': True}, + } + + _attribute_map = { + 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, + 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, + 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, + 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, + 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, + 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, + 'preferences': {'key': 'preferences', 'type': 'Preferences'}, + 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, + 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, + 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_tera_bytes': {'key': 'expectedDataSizeInTeraBytes', 'type': 'int'}, + 'actions': {'key': 'actions', 'type': '[str]'}, + 'last_mitigation_action_on_job': {'key': 'lastMitigationActionOnJob', 'type': 'LastMitigationActionOnJob'}, + } + + _subtype_map = { + 'job_details_type': {'DataBox': 'DataBoxJobDetails', 'DataBoxCustomerDisk': 'DataBoxCustomerDiskJobDetails', 'DataBoxDisk': 'DataBoxDiskJobDetails', 'DataBoxHeavy': 'DataBoxHeavyJobDetails'} + } + + def __init__( + self, + *, + contact_details: "ContactDetails", + shipping_address: Optional["ShippingAddress"] = None, + data_import_details: Optional[List["DataImportDetails"]] = None, + data_export_details: Optional[List["DataExportDetails"]] = None, + preferences: Optional["Preferences"] = None, + key_encryption_key: Optional["KeyEncryptionKey"] = None, + expected_data_size_in_tera_bytes: Optional[int] = None, + **kwargs + ): + super(JobDetails, self).__init__(**kwargs) + self.job_stages = None + self.contact_details = contact_details + self.shipping_address = shipping_address + self.delivery_package = None + self.return_package = None + self.data_import_details = data_import_details + self.data_export_details = data_export_details + self.job_details_type = None # type: Optional[str] + self.preferences = preferences + self.copy_log_details = None + self.reverse_shipment_label_sas_key = None + self.chain_of_custody_sas_key = None + self.key_encryption_key = key_encryption_key + self.expected_data_size_in_tera_bytes = expected_data_size_in_tera_bytes + self.actions = None + self.last_mitigation_action_on_job = None + + +class DataBoxCustomerDiskJobDetails(JobDetails): + """Customer disk job details. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar job_stages: List of stages that run in the job. 
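Editor's note: JobDetails is another discriminated base; the jobDetailsType value on the wire selects one of the four SKU-specific subclasses listed in its _subtype_map, each of which fixes job_details_type in its constructor. The sketch below is a hypothetical consumer-side helper that branches on that discriminator once a job has been fetched with its details expanded; the stub class only stands in for a deserialized DataBoxDiskJobDetails instance and is not part of this extension.

# Hypothetical helper: branch on the job_details_type discriminator of an
# already-deserialized JobDetails subclass.
def summarize_job_details(details):
    kind = details.job_details_type
    if kind == 'DataBoxDisk':
        return 'disk job; preferred disks: {}'.format(details.preferred_disks)
    if kind == 'DataBoxHeavy':
        return 'heavy job; copy progress entries: {}'.format(len(details.copy_progress or []))
    if kind == 'DataBoxCustomerDisk':
        return 'customer disk job; import disks: {}'.format(len(details.import_disk_details_collection or {}))
    return 'databox job; expected size: {} TB'.format(details.expected_data_size_in_tera_bytes)


class _StubDiskDetails:  # stand-in for a DataBoxDiskJobDetails instance
    job_details_type = 'DataBoxDisk'
    preferred_disks = {'2': 5}  # five 2 TB disks, per the preferred_disks docstring below


print(summarize_job_details(_StubDiskDetails()))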
+ :vartype job_stages: list[~data_box_management_client.models.JobStages] + :param contact_details: Required. Contact details for notification and shipping. + :type contact_details: ~data_box_management_client.models.ContactDetails + :param shipping_address: Shipping address of the customer. + :type shipping_address: ~data_box_management_client.models.ShippingAddress + :ivar delivery_package: Delivery package shipping details. + :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails + :ivar return_package: Return package shipping details. + :vartype return_package: ~data_box_management_client.models.PackageShippingDetails + :param data_import_details: Details of the data to be imported into azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] + :param job_details_type: Required. Indicates the type of job details.Constant filled by server. + Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", "DataBoxCustomerDisk". + :type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator + :param preferences: Preferences for the order. + :type preferences: ~data_box_management_client.models.Preferences + :ivar copy_log_details: List of copy log details. + :vartype copy_log_details: list[~data_box_management_client.models.CopyLogDetails] + :ivar reverse_shipment_label_sas_key: Shared access key to download the return shipment label. + :vartype reverse_shipment_label_sas_key: str + :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. + :vartype chain_of_custody_sas_key: str + :param key_encryption_key: Details about which key encryption type is being used. + :type key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_tera_bytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. + :type expected_data_size_in_tera_bytes: int + :ivar actions: Available actions on the job. + :vartype actions: list[str or ~data_box_management_client.models.CustomerResolutionCode] + :ivar last_mitigation_action_on_job: Last mitigation action performed on the job. + :vartype last_mitigation_action_on_job: + ~data_box_management_client.models.LastMitigationActionOnJob + :param import_disk_details_collection: Contains the map of disk serial number to the disk + details for import jobs. + :type import_disk_details_collection: dict[str, + ~data_box_management_client.models.ImportDiskDetails] + :ivar export_disk_details_collection: Contains the map of disk serial number to the disk + details for export jobs. + :vartype export_disk_details_collection: dict[str, + ~data_box_management_client.models.ExportDiskDetails] + :ivar copy_progress_list: Copy progress per disk. + :vartype copy_progress_list: + list[~data_box_management_client.models.DataBoxCustomerDiskCopyProgress] + :ivar delivery_package_details: Delivery package shipping details. + :vartype delivery_package_details: ~data_box_management_client.models.PackageCarrierInfo + :param return_package_details: Required. Return package shipping details. + :type return_package_details: ~data_box_management_client.models.PackageCarrierDetails + :param xt_passthrough_job_arm_id: ARM id of the XT passthrough job. 
+ :type xt_passthrough_job_arm_id: str + :ivar datacenter_address: Datacenter address for given sku in a region. + :vartype datacenter_address: ~data_box_management_client.models.DatacenterAddressResponse + """ + + _validation = { + 'job_stages': {'readonly': True}, + 'contact_details': {'required': True}, + 'delivery_package': {'readonly': True}, + 'return_package': {'readonly': True}, + 'job_details_type': {'required': True}, + 'copy_log_details': {'readonly': True}, + 'reverse_shipment_label_sas_key': {'readonly': True}, + 'chain_of_custody_sas_key': {'readonly': True}, + 'actions': {'readonly': True}, + 'last_mitigation_action_on_job': {'readonly': True}, + 'export_disk_details_collection': {'readonly': True}, + 'copy_progress_list': {'readonly': True}, + 'delivery_package_details': {'readonly': True}, + 'return_package_details': {'required': True}, + 'datacenter_address': {'readonly': True}, + } + + _attribute_map = { + 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, + 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, + 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, + 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, + 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, + 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, + 'preferences': {'key': 'preferences', 'type': 'Preferences'}, + 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, + 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, + 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_tera_bytes': {'key': 'expectedDataSizeInTeraBytes', 'type': 'int'}, + 'actions': {'key': 'actions', 'type': '[str]'}, + 'last_mitigation_action_on_job': {'key': 'lastMitigationActionOnJob', 'type': 'LastMitigationActionOnJob'}, + 'import_disk_details_collection': {'key': 'importDiskDetailsCollection', 'type': '{ImportDiskDetails}'}, + 'export_disk_details_collection': {'key': 'exportDiskDetailsCollection', 'type': '{ExportDiskDetails}'}, + 'copy_progress_list': {'key': 'copyProgressList', 'type': '[DataBoxCustomerDiskCopyProgress]'}, + 'delivery_package_details': {'key': 'deliveryPackageDetails', 'type': 'PackageCarrierInfo'}, + 'return_package_details': {'key': 'returnPackageDetails', 'type': 'PackageCarrierDetails'}, + 'xt_passthrough_job_arm_id': {'key': 'xtPassthroughJobArmId', 'type': 'str'}, + 'datacenter_address': {'key': 'datacenterAddress', 'type': 'DatacenterAddressResponse'}, + } + + def __init__( + self, + *, + contact_details: "ContactDetails", + return_package_details: "PackageCarrierDetails", + shipping_address: Optional["ShippingAddress"] = None, + data_import_details: Optional[List["DataImportDetails"]] = None, + data_export_details: Optional[List["DataExportDetails"]] = None, + preferences: Optional["Preferences"] = None, + key_encryption_key: Optional["KeyEncryptionKey"] = None, + expected_data_size_in_tera_bytes: Optional[int] = None, + import_disk_details_collection: Optional[Dict[str, "ImportDiskDetails"]] = None, + xt_passthrough_job_arm_id: Optional[str] = None, + **kwargs + ): + super(DataBoxCustomerDiskJobDetails, 
self).__init__(contact_details=contact_details, shipping_address=shipping_address, data_import_details=data_import_details, data_export_details=data_export_details, preferences=preferences, key_encryption_key=key_encryption_key, expected_data_size_in_tera_bytes=expected_data_size_in_tera_bytes, **kwargs) + self.job_details_type = 'DataBoxCustomerDisk' # type: str + self.import_disk_details_collection = import_disk_details_collection + self.export_disk_details_collection = None + self.copy_progress_list = None + self.delivery_package_details = None + self.return_package_details = return_package_details + self.xt_passthrough_job_arm_id = xt_passthrough_job_arm_id + self.datacenter_address = None + + +class DataBoxDiskCopyLogDetails(CopyLogDetails): + """Copy Log Details for a disk. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param copy_log_details_type: Required. Indicates the type of job details.Constant filled by + server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type copy_log_details_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar disk_serial_number: Disk Serial Number. + :vartype disk_serial_number: str + :ivar error_log_link: Link for copy error logs. + :vartype error_log_link: str + :ivar verbose_log_link: Link for copy verbose logs. + :vartype verbose_log_link: str + """ + + _validation = { + 'copy_log_details_type': {'required': True}, + 'disk_serial_number': {'readonly': True}, + 'error_log_link': {'readonly': True}, + 'verbose_log_link': {'readonly': True}, + } + + _attribute_map = { + 'copy_log_details_type': {'key': 'copyLogDetailsType', 'type': 'str'}, + 'disk_serial_number': {'key': 'diskSerialNumber', 'type': 'str'}, + 'error_log_link': {'key': 'errorLogLink', 'type': 'str'}, + 'verbose_log_link': {'key': 'verboseLogLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DataBoxDiskCopyLogDetails, self).__init__(**kwargs) + self.copy_log_details_type = 'DataBoxDisk' # type: str + self.disk_serial_number = None + self.error_log_link = None + self.verbose_log_link = None + + +class DataBoxDiskCopyProgress(msrest.serialization.Model): + """DataBox Disk Copy Progress. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar serial_number: The serial number of the disk. + :vartype serial_number: str + :ivar bytes_copied: Bytes copied during the copy of disk. + :vartype bytes_copied: long + :ivar percent_complete: Indicates the percentage completed for the copy of the disk. + :vartype percent_complete: int + :ivar status: The Status of the copy. Possible values include: "NotStarted", "InProgress", + "Completed", "CompletedWithErrors", "Failed", "NotReturned", "HardwareError", + "DeviceFormatted", "DeviceMetadataModified", "StorageAccountNotAccessible", "UnsupportedData". 
+ :vartype status: str or ~data_box_management_client.models.CopyStatus + """ + + _validation = { + 'serial_number': {'readonly': True}, + 'bytes_copied': {'readonly': True}, + 'percent_complete': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'serial_number': {'key': 'serialNumber', 'type': 'str'}, + 'bytes_copied': {'key': 'bytesCopied', 'type': 'long'}, + 'percent_complete': {'key': 'percentComplete', 'type': 'int'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DataBoxDiskCopyProgress, self).__init__(**kwargs) + self.serial_number = None + self.bytes_copied = None + self.percent_complete = None + self.status = None + + +class DataBoxDiskJobDetails(JobDetails): + """DataBox Disk Job Details. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar job_stages: List of stages that run in the job. + :vartype job_stages: list[~data_box_management_client.models.JobStages] + :param contact_details: Required. Contact details for notification and shipping. + :type contact_details: ~data_box_management_client.models.ContactDetails + :param shipping_address: Shipping address of the customer. + :type shipping_address: ~data_box_management_client.models.ShippingAddress + :ivar delivery_package: Delivery package shipping details. + :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails + :ivar return_package: Return package shipping details. + :vartype return_package: ~data_box_management_client.models.PackageShippingDetails + :param data_import_details: Details of the data to be imported into azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] + :param job_details_type: Required. Indicates the type of job details.Constant filled by server. + Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", "DataBoxCustomerDisk". + :type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator + :param preferences: Preferences for the order. + :type preferences: ~data_box_management_client.models.Preferences + :ivar copy_log_details: List of copy log details. + :vartype copy_log_details: list[~data_box_management_client.models.CopyLogDetails] + :ivar reverse_shipment_label_sas_key: Shared access key to download the return shipment label. + :vartype reverse_shipment_label_sas_key: str + :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. + :vartype chain_of_custody_sas_key: str + :param key_encryption_key: Details about which key encryption type is being used. + :type key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_tera_bytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. + :type expected_data_size_in_tera_bytes: int + :ivar actions: Available actions on the job. + :vartype actions: list[str or ~data_box_management_client.models.CustomerResolutionCode] + :ivar last_mitigation_action_on_job: Last mitigation action performed on the job. 
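Editor's note: DataBoxDiskCopyProgress is a flat, read-only progress record (serial number, bytes copied, percent complete, status). The snippet below is a hypothetical reporting helper over such a list; it assumes copy_progress is the list exposed on a disk job's expanded details and is not part of the generated code.

# Hypothetical progress printout for a DataBox Disk job; copy_progress is a
# list of DataBoxDiskCopyProgress instances.
def print_disk_copy_progress(copy_progress):
    for disk in copy_progress or []:
        print('{serial}: {pct}% complete, {copied} bytes copied, status={status}'.format(
            serial=disk.serial_number,
            pct=disk.percent_complete,
            copied=disk.bytes_copied,
            status=disk.status,
        ))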
+ :vartype last_mitigation_action_on_job: + ~data_box_management_client.models.LastMitigationActionOnJob + :param preferred_disks: User preference on what size disks are needed for the job. The map is + from the disk size in TB to the count. Eg. {2,5} means 5 disks of 2 TB size. Key is string but + will be checked against an int. + :type preferred_disks: dict[str, int] + :ivar copy_progress: Copy progress per disk. + :vartype copy_progress: list[~data_box_management_client.models.DataBoxDiskCopyProgress] + :ivar disks_and_size_details: Contains the map of disk serial number to the disk size being + used for the job. Is returned only after the disks are shipped to the customer. + :vartype disks_and_size_details: dict[str, int] + :param passkey: User entered passkey for DataBox Disk job. + :type passkey: str + """ + + _validation = { + 'job_stages': {'readonly': True}, + 'contact_details': {'required': True}, + 'delivery_package': {'readonly': True}, + 'return_package': {'readonly': True}, + 'job_details_type': {'required': True}, + 'copy_log_details': {'readonly': True}, + 'reverse_shipment_label_sas_key': {'readonly': True}, + 'chain_of_custody_sas_key': {'readonly': True}, + 'actions': {'readonly': True}, + 'last_mitigation_action_on_job': {'readonly': True}, + 'copy_progress': {'readonly': True}, + 'disks_and_size_details': {'readonly': True}, + } + + _attribute_map = { + 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, + 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, + 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, + 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, + 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, + 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, + 'preferences': {'key': 'preferences', 'type': 'Preferences'}, + 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, + 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, + 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_tera_bytes': {'key': 'expectedDataSizeInTeraBytes', 'type': 'int'}, + 'actions': {'key': 'actions', 'type': '[str]'}, + 'last_mitigation_action_on_job': {'key': 'lastMitigationActionOnJob', 'type': 'LastMitigationActionOnJob'}, + 'preferred_disks': {'key': 'preferredDisks', 'type': '{int}'}, + 'copy_progress': {'key': 'copyProgress', 'type': '[DataBoxDiskCopyProgress]'}, + 'disks_and_size_details': {'key': 'disksAndSizeDetails', 'type': '{int}'}, + 'passkey': {'key': 'passkey', 'type': 'str'}, + } + + def __init__( + self, + *, + contact_details: "ContactDetails", + shipping_address: Optional["ShippingAddress"] = None, + data_import_details: Optional[List["DataImportDetails"]] = None, + data_export_details: Optional[List["DataExportDetails"]] = None, + preferences: Optional["Preferences"] = None, + key_encryption_key: Optional["KeyEncryptionKey"] = None, + expected_data_size_in_tera_bytes: Optional[int] = None, + preferred_disks: Optional[Dict[str, int]] = None, + passkey: Optional[str] = None, + **kwargs + ): + super(DataBoxDiskJobDetails, self).__init__(contact_details=contact_details, 
shipping_address=shipping_address, data_import_details=data_import_details, data_export_details=data_export_details, preferences=preferences, key_encryption_key=key_encryption_key, expected_data_size_in_tera_bytes=expected_data_size_in_tera_bytes, **kwargs) + self.job_details_type = 'DataBoxDisk' # type: str + self.preferred_disks = preferred_disks + self.copy_progress = None + self.disks_and_size_details = None + self.passkey = passkey + + +class DataBoxDiskJobSecrets(JobSecrets): + """The secrets related to disk job. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant + filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. + :vartype error: ~data_box_management_client.models.CloudError + :ivar disk_secrets: Contains the list of secrets object for that device. + :vartype disk_secrets: list[~data_box_management_client.models.DiskSecret] + :ivar pass_key: PassKey for the disk Job. + :vartype pass_key: str + :ivar is_passkey_user_defined: Whether passkey was provided by user. + :vartype is_passkey_user_defined: bool + """ + + _validation = { + 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, + 'disk_secrets': {'readonly': True}, + 'pass_key': {'readonly': True}, + 'is_passkey_user_defined': {'readonly': True}, + } + + _attribute_map = { + 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, + 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, + 'disk_secrets': {'key': 'diskSecrets', 'type': '[DiskSecret]'}, + 'pass_key': {'key': 'passKey', 'type': 'str'}, + 'is_passkey_user_defined': {'key': 'isPasskeyUserDefined', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(DataBoxDiskJobSecrets, self).__init__(**kwargs) + self.job_secrets_type = 'DataBoxDisk' # type: str + self.disk_secrets = None + self.pass_key = None + self.is_passkey_user_defined = None + + +class DataBoxHeavyAccountCopyLogDetails(CopyLogDetails): + """Copy log details for a storage account for Databox heavy. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param copy_log_details_type: Required. Indicates the type of job details.Constant filled by + server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type copy_log_details_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar account_name: Account name. + :vartype account_name: str + :ivar copy_log_link: Link for copy logs. + :vartype copy_log_link: list[str] + :ivar copy_verbose_log_link: Link for copy verbose logs. This will be set only when the + LogCollectionLevel is set to verbose. 
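Editor's note: the preferred_disks map documented on DataBoxDiskJobDetails is keyed by disk size in TB expressed as a string, with the count as the value, so {'2': 5} requests five 2 TB disks. A short illustrative snippet follows; the disk sizes shown are examples only, and the service decides which sizes are actually offered.

# Illustrative only: request five 2 TB disks and two 8 TB disks. Keys are disk
# sizes in TB expressed as strings, values are counts, per the docstring above.
preferred_disks = {'2': 5, '8': 2}
total_capacity_tb = sum(int(size_tb) * count for size_tb, count in preferred_disks.items())
print(total_capacity_tb)  # 26 TB requested in total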
+ :vartype copy_verbose_log_link: list[str] + """ + + _validation = { + 'copy_log_details_type': {'required': True}, + 'account_name': {'readonly': True}, + 'copy_log_link': {'readonly': True}, + 'copy_verbose_log_link': {'readonly': True}, + } + + _attribute_map = { + 'copy_log_details_type': {'key': 'copyLogDetailsType', 'type': 'str'}, + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'copy_log_link': {'key': 'copyLogLink', 'type': '[str]'}, + 'copy_verbose_log_link': {'key': 'copyVerboseLogLink', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(DataBoxHeavyAccountCopyLogDetails, self).__init__(**kwargs) + self.copy_log_details_type = 'DataBoxHeavy' # type: str + self.account_name = None + self.copy_log_link = None + self.copy_verbose_log_link = None + + +class DataBoxHeavyJobDetails(JobDetails): + """Databox Heavy Device Job Details. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar job_stages: List of stages that run in the job. + :vartype job_stages: list[~data_box_management_client.models.JobStages] + :param contact_details: Required. Contact details for notification and shipping. + :type contact_details: ~data_box_management_client.models.ContactDetails + :param shipping_address: Shipping address of the customer. + :type shipping_address: ~data_box_management_client.models.ShippingAddress + :ivar delivery_package: Delivery package shipping details. + :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails + :ivar return_package: Return package shipping details. + :vartype return_package: ~data_box_management_client.models.PackageShippingDetails + :param data_import_details: Details of the data to be imported into azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] + :param job_details_type: Required. Indicates the type of job details.Constant filled by server. + Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", "DataBoxCustomerDisk". + :type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator + :param preferences: Preferences for the order. + :type preferences: ~data_box_management_client.models.Preferences + :ivar copy_log_details: List of copy log details. + :vartype copy_log_details: list[~data_box_management_client.models.CopyLogDetails] + :ivar reverse_shipment_label_sas_key: Shared access key to download the return shipment label. + :vartype reverse_shipment_label_sas_key: str + :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. + :vartype chain_of_custody_sas_key: str + :param key_encryption_key: Details about which key encryption type is being used. + :type key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_tera_bytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. + :type expected_data_size_in_tera_bytes: int + :ivar actions: Available actions on the job. + :vartype actions: list[str or ~data_box_management_client.models.CustomerResolutionCode] + :ivar last_mitigation_action_on_job: Last mitigation action performed on the job. 
+ :vartype last_mitigation_action_on_job: + ~data_box_management_client.models.LastMitigationActionOnJob + :ivar copy_progress: Copy progress per account. + :vartype copy_progress: list[~data_box_management_client.models.CopyProgress] + :param device_password: Set Device password for unlocking Databox Heavy. Should not be passed + for TransferType:ExportFromAzure jobs. If this is not passed, the service will generate + password itself. This will not be returned in Get Call. Password Requirements : Password must + be minimum of 12 and maximum of 64 characters. Password must have at least one uppercase + alphabet, one number and one special character. Password cannot have the following characters : + IilLoO0 Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. + :type device_password: str + """ + + _validation = { + 'job_stages': {'readonly': True}, + 'contact_details': {'required': True}, + 'delivery_package': {'readonly': True}, + 'return_package': {'readonly': True}, + 'job_details_type': {'required': True}, + 'copy_log_details': {'readonly': True}, + 'reverse_shipment_label_sas_key': {'readonly': True}, + 'chain_of_custody_sas_key': {'readonly': True}, + 'actions': {'readonly': True}, + 'last_mitigation_action_on_job': {'readonly': True}, + 'copy_progress': {'readonly': True}, + } + + _attribute_map = { + 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, + 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, + 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, + 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, + 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, + 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, + 'preferences': {'key': 'preferences', 'type': 'Preferences'}, + 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, + 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, + 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_tera_bytes': {'key': 'expectedDataSizeInTeraBytes', 'type': 'int'}, + 'actions': {'key': 'actions', 'type': '[str]'}, + 'last_mitigation_action_on_job': {'key': 'lastMitigationActionOnJob', 'type': 'LastMitigationActionOnJob'}, + 'copy_progress': {'key': 'copyProgress', 'type': '[CopyProgress]'}, + 'device_password': {'key': 'devicePassword', 'type': 'str'}, + } + + def __init__( + self, + *, + contact_details: "ContactDetails", + shipping_address: Optional["ShippingAddress"] = None, + data_import_details: Optional[List["DataImportDetails"]] = None, + data_export_details: Optional[List["DataExportDetails"]] = None, + preferences: Optional["Preferences"] = None, + key_encryption_key: Optional["KeyEncryptionKey"] = None, + expected_data_size_in_tera_bytes: Optional[int] = None, + device_password: Optional[str] = None, + **kwargs + ): + super(DataBoxHeavyJobDetails, self).__init__(contact_details=contact_details, shipping_address=shipping_address, data_import_details=data_import_details, data_export_details=data_export_details, preferences=preferences, key_encryption_key=key_encryption_key, 
expected_data_size_in_tera_bytes=expected_data_size_in_tera_bytes, **kwargs) + self.job_details_type = 'DataBoxHeavy' # type: str + self.copy_progress = None + self.device_password = device_password + + +class DataBoxHeavyJobSecrets(JobSecrets): + """The secrets related to a databox heavy job. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant + filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. + :vartype error: ~data_box_management_client.models.CloudError + :ivar cabinet_pod_secrets: Contains the list of secret objects for a databox heavy job. + :vartype cabinet_pod_secrets: list[~data_box_management_client.models.DataBoxHeavySecret] + """ + + _validation = { + 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, + 'cabinet_pod_secrets': {'readonly': True}, + } + + _attribute_map = { + 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, + 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, + 'cabinet_pod_secrets': {'key': 'cabinetPodSecrets', 'type': '[DataBoxHeavySecret]'}, + } + + def __init__( + self, + **kwargs + ): + super(DataBoxHeavyJobSecrets, self).__init__(**kwargs) + self.job_secrets_type = 'DataBoxHeavy' # type: str + self.cabinet_pod_secrets = None + + +class DataBoxHeavySecret(msrest.serialization.Model): + """The secrets related to a databox heavy. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar device_serial_number: Serial number of the assigned device. + :vartype device_serial_number: str + :ivar device_password: Password for out of the box experience on device. + :vartype device_password: str + :ivar network_configurations: Network configuration of the appliance. + :vartype network_configurations: + list[~data_box_management_client.models.ApplianceNetworkConfiguration] + :ivar encoded_validation_cert_pub_key: The base 64 encoded public key to authenticate with the + device. + :vartype encoded_validation_cert_pub_key: str + :ivar account_credential_details: Per account level access credentials. 
+ :vartype account_credential_details: + list[~data_box_management_client.models.AccountCredentialDetails] + """ + + _validation = { + 'device_serial_number': {'readonly': True}, + 'device_password': {'readonly': True}, + 'network_configurations': {'readonly': True}, + 'encoded_validation_cert_pub_key': {'readonly': True}, + 'account_credential_details': {'readonly': True}, + } + + _attribute_map = { + 'device_serial_number': {'key': 'deviceSerialNumber', 'type': 'str'}, + 'device_password': {'key': 'devicePassword', 'type': 'str'}, + 'network_configurations': {'key': 'networkConfigurations', 'type': '[ApplianceNetworkConfiguration]'}, + 'encoded_validation_cert_pub_key': {'key': 'encodedValidationCertPubKey', 'type': 'str'}, + 'account_credential_details': {'key': 'accountCredentialDetails', 'type': '[AccountCredentialDetails]'}, + } + + def __init__( + self, + **kwargs + ): + super(DataBoxHeavySecret, self).__init__(**kwargs) + self.device_serial_number = None + self.device_password = None + self.network_configurations = None + self.encoded_validation_cert_pub_key = None + self.account_credential_details = None + + +class DataBoxJobDetails(JobDetails): + """Databox Job Details. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar job_stages: List of stages that run in the job. + :vartype job_stages: list[~data_box_management_client.models.JobStages] + :param contact_details: Required. Contact details for notification and shipping. + :type contact_details: ~data_box_management_client.models.ContactDetails + :param shipping_address: Shipping address of the customer. + :type shipping_address: ~data_box_management_client.models.ShippingAddress + :ivar delivery_package: Delivery package shipping details. + :vartype delivery_package: ~data_box_management_client.models.PackageShippingDetails + :ivar return_package: Return package shipping details. + :vartype return_package: ~data_box_management_client.models.PackageShippingDetails + :param data_import_details: Details of the data to be imported into azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param data_export_details: Details of the data to be exported from azure. + :type data_export_details: list[~data_box_management_client.models.DataExportDetails] + :param job_details_type: Required. Indicates the type of job details.Constant filled by server. + Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", "DataBoxCustomerDisk". + :type job_details_type: str or ~data_box_management_client.models.ClassDiscriminator + :param preferences: Preferences for the order. + :type preferences: ~data_box_management_client.models.Preferences + :ivar copy_log_details: List of copy log details. + :vartype copy_log_details: list[~data_box_management_client.models.CopyLogDetails] + :ivar reverse_shipment_label_sas_key: Shared access key to download the return shipment label. + :vartype reverse_shipment_label_sas_key: str + :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. + :vartype chain_of_custody_sas_key: str + :param key_encryption_key: Details about which key encryption type is being used. + :type key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param expected_data_size_in_tera_bytes: The expected size of the data, which needs to be + transferred in this job, in terabytes. 
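Editor's note: every field on DataBoxHeavySecret is server-populated and read-only; encoded_validation_cert_pub_key in particular is documented as a base-64 encoded public key used to authenticate with the device. The helper below is a hypothetical local decode step, not part of the generated SDK.

import base64

# Hypothetical: decode the base-64 validation certificate public key carried on
# a DataBoxHeavySecret instance for local inspection.
def decode_validation_cert_pub_key(secret):
    return base64.b64decode(secret.encoded_validation_cert_pub_key)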
+ :type expected_data_size_in_tera_bytes: int + :ivar actions: Available actions on the job. + :vartype actions: list[str or ~data_box_management_client.models.CustomerResolutionCode] + :ivar last_mitigation_action_on_job: Last mitigation action performed on the job. + :vartype last_mitigation_action_on_job: + ~data_box_management_client.models.LastMitigationActionOnJob + :ivar copy_progress: Copy progress per storage account. + :vartype copy_progress: list[~data_box_management_client.models.CopyProgress] + :param device_password: Set Device password for unlocking Databox. Should not be passed for + TransferType:ExportFromAzure jobs. If this is not passed, the service will generate password + itself. This will not be returned in Get Call. Password Requirements : Password must be + minimum of 12 and maximum of 64 characters. Password must have at least one uppercase alphabet, + one number and one special character. Password cannot have the following characters : IilLoO0 + Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. + :type device_password: str + """ + + _validation = { + 'job_stages': {'readonly': True}, + 'contact_details': {'required': True}, + 'delivery_package': {'readonly': True}, + 'return_package': {'readonly': True}, + 'job_details_type': {'required': True}, + 'copy_log_details': {'readonly': True}, + 'reverse_shipment_label_sas_key': {'readonly': True}, + 'chain_of_custody_sas_key': {'readonly': True}, + 'actions': {'readonly': True}, + 'last_mitigation_action_on_job': {'readonly': True}, + 'copy_progress': {'readonly': True}, + } + + _attribute_map = { + 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, + 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, + 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, + 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, + 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, + 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, + 'preferences': {'key': 'preferences', 'type': 'Preferences'}, + 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, + 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, + 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'expected_data_size_in_tera_bytes': {'key': 'expectedDataSizeInTeraBytes', 'type': 'int'}, + 'actions': {'key': 'actions', 'type': '[str]'}, + 'last_mitigation_action_on_job': {'key': 'lastMitigationActionOnJob', 'type': 'LastMitigationActionOnJob'}, + 'copy_progress': {'key': 'copyProgress', 'type': '[CopyProgress]'}, + 'device_password': {'key': 'devicePassword', 'type': 'str'}, + } + + def __init__( + self, + *, + contact_details: "ContactDetails", + shipping_address: Optional["ShippingAddress"] = None, + data_import_details: Optional[List["DataImportDetails"]] = None, + data_export_details: Optional[List["DataExportDetails"]] = None, + preferences: Optional["Preferences"] = None, + key_encryption_key: Optional["KeyEncryptionKey"] = None, + expected_data_size_in_tera_bytes: Optional[int] = None, + device_password: Optional[str] = None, + **kwargs + ): + super(DataBoxJobDetails, 
self).__init__(contact_details=contact_details, shipping_address=shipping_address, data_import_details=data_import_details, data_export_details=data_export_details, preferences=preferences, key_encryption_key=key_encryption_key, expected_data_size_in_tera_bytes=expected_data_size_in_tera_bytes, **kwargs) + self.job_details_type = 'DataBox' # type: str + self.copy_progress = None + self.device_password = device_password + + +class DataboxJobSecrets(JobSecrets): + """The secrets related to a databox job. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant + filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type job_secrets_type: str or ~data_box_management_client.models.ClassDiscriminator + :ivar dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. + :vartype dc_access_security_code: ~data_box_management_client.models.DcAccessSecurityCode + :ivar error: Error while fetching the secrets. + :vartype error: ~data_box_management_client.models.CloudError + :param pod_secrets: Contains the list of secret objects for a job. + :type pod_secrets: list[~data_box_management_client.models.DataBoxSecret] + """ + + _validation = { + 'job_secrets_type': {'required': True}, + 'dc_access_security_code': {'readonly': True}, + 'error': {'readonly': True}, + } + + _attribute_map = { + 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, + 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, + 'error': {'key': 'error', 'type': 'CloudError'}, + 'pod_secrets': {'key': 'podSecrets', 'type': '[DataBoxSecret]'}, + } + + def __init__( + self, + *, + pod_secrets: Optional[List["DataBoxSecret"]] = None, + **kwargs + ): + super(DataboxJobSecrets, self).__init__(**kwargs) + self.job_secrets_type = 'DataBox' # type: str + self.pod_secrets = pod_secrets + + +class ScheduleAvailabilityRequest(msrest.serialization.Model): + """Request body to get the availability for scheduling orders. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: DataBoxScheduleAvailabilityRequest, DiskScheduleAvailabilityRequest, HeavyScheduleAvailabilityRequest. + + All required parameters must be populated in order to send to Azure. + + :param storage_location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. + :type storage_location: str + :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by + server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type sku_name: str or ~data_box_management_client.models.SkuName + :param country: Country in which storage location should be supported. 
+ :type country: str + """ + + _validation = { + 'storage_location': {'required': True}, + 'sku_name': {'required': True}, + } + + _attribute_map = { + 'storage_location': {'key': 'storageLocation', 'type': 'str'}, + 'sku_name': {'key': 'skuName', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, + } + + _subtype_map = { + 'sku_name': {'DataBox': 'DataBoxScheduleAvailabilityRequest', 'DataBoxDisk': 'DiskScheduleAvailabilityRequest', 'DataBoxHeavy': 'HeavyScheduleAvailabilityRequest'} + } + + def __init__( + self, + *, + storage_location: str, + country: Optional[str] = None, + **kwargs + ): + super(ScheduleAvailabilityRequest, self).__init__(**kwargs) + self.storage_location = storage_location + self.sku_name = None # type: Optional[str] + self.country = country + + +class DataBoxScheduleAvailabilityRequest(ScheduleAvailabilityRequest): + """Request body to get the availability for scheduling data box orders orders. + + All required parameters must be populated in order to send to Azure. + + :param storage_location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. + :type storage_location: str + :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by + server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type sku_name: str or ~data_box_management_client.models.SkuName + :param country: Country in which storage location should be supported. + :type country: str + """ + + _validation = { + 'storage_location': {'required': True}, + 'sku_name': {'required': True}, + } + + _attribute_map = { + 'storage_location': {'key': 'storageLocation', 'type': 'str'}, + 'sku_name': {'key': 'skuName', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, + } + + def __init__( + self, + *, + storage_location: str, + country: Optional[str] = None, + **kwargs + ): + super(DataBoxScheduleAvailabilityRequest, self).__init__(storage_location=storage_location, country=country, **kwargs) + self.sku_name = 'DataBox' # type: str + + +class DataBoxSecret(msrest.serialization.Model): + """The secrets related to a DataBox. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar device_serial_number: Serial number of the assigned device. + :vartype device_serial_number: str + :ivar device_password: Password for out of the box experience on device. + :vartype device_password: str + :ivar network_configurations: Network configuration of the appliance. + :vartype network_configurations: + list[~data_box_management_client.models.ApplianceNetworkConfiguration] + :ivar encoded_validation_cert_pub_key: The base 64 encoded public key to authenticate with the + device. + :vartype encoded_validation_cert_pub_key: str + :ivar account_credential_details: Per account level access credentials. 
+ :vartype account_credential_details: + list[~data_box_management_client.models.AccountCredentialDetails] + """ + + _validation = { + 'device_serial_number': {'readonly': True}, + 'device_password': {'readonly': True}, + 'network_configurations': {'readonly': True}, + 'encoded_validation_cert_pub_key': {'readonly': True}, + 'account_credential_details': {'readonly': True}, + } + + _attribute_map = { + 'device_serial_number': {'key': 'deviceSerialNumber', 'type': 'str'}, + 'device_password': {'key': 'devicePassword', 'type': 'str'}, + 'network_configurations': {'key': 'networkConfigurations', 'type': '[ApplianceNetworkConfiguration]'}, + 'encoded_validation_cert_pub_key': {'key': 'encodedValidationCertPubKey', 'type': 'str'}, + 'account_credential_details': {'key': 'accountCredentialDetails', 'type': '[AccountCredentialDetails]'}, + } + + def __init__( + self, + **kwargs + ): + super(DataBoxSecret, self).__init__(**kwargs) + self.device_serial_number = None + self.device_password = None + self.network_configurations = None + self.encoded_validation_cert_pub_key = None + self.account_credential_details = None + + +class DatacenterAddressResponse(msrest.serialization.Model): + """Datacenter address for given storage location. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: DatacenterAddressInstructionResponse, DatacenterAddressLocationResponse. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param datacenter_address_type: Required. Data center address type.Constant filled by server. + Possible values include: "DatacenterAddressLocation", "DatacenterAddressInstruction". + :type datacenter_address_type: str or ~data_box_management_client.models.DatacenterAddressType + :ivar supported_carriers_for_return_shipment: List of supported carriers for return shipment. + :vartype supported_carriers_for_return_shipment: list[str] + """ + + _validation = { + 'datacenter_address_type': {'required': True}, + 'supported_carriers_for_return_shipment': {'readonly': True}, + } + + _attribute_map = { + 'datacenter_address_type': {'key': 'datacenterAddressType', 'type': 'str'}, + 'supported_carriers_for_return_shipment': {'key': 'supportedCarriersForReturnShipment', 'type': '[str]'}, + } + + _subtype_map = { + 'datacenter_address_type': {'DatacenterAddressInstruction': 'DatacenterAddressInstructionResponse', 'DatacenterAddressLocation': 'DatacenterAddressLocationResponse'} + } + + def __init__( + self, + **kwargs + ): + super(DatacenterAddressResponse, self).__init__(**kwargs) + self.datacenter_address_type = None # type: Optional[str] + self.supported_carriers_for_return_shipment = None + + +class DatacenterAddressInstructionResponse(DatacenterAddressResponse): + """Datacenter address for given storage location. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param datacenter_address_type: Required. Data center address type.Constant filled by server. + Possible values include: "DatacenterAddressLocation", "DatacenterAddressInstruction". + :type datacenter_address_type: str or ~data_box_management_client.models.DatacenterAddressType + :ivar supported_carriers_for_return_shipment: List of supported carriers for return shipment. 
+ :vartype supported_carriers_for_return_shipment: list[str] + :ivar communication_instruction: Data center communication instruction. + :vartype communication_instruction: str + """ + + _validation = { + 'datacenter_address_type': {'required': True}, + 'supported_carriers_for_return_shipment': {'readonly': True}, + 'communication_instruction': {'readonly': True}, + } + + _attribute_map = { + 'datacenter_address_type': {'key': 'datacenterAddressType', 'type': 'str'}, + 'supported_carriers_for_return_shipment': {'key': 'supportedCarriersForReturnShipment', 'type': '[str]'}, + 'communication_instruction': {'key': 'communicationInstruction', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DatacenterAddressInstructionResponse, self).__init__(**kwargs) + self.datacenter_address_type = 'DatacenterAddressInstruction' # type: str + self.communication_instruction = None + + +class DatacenterAddressLocationResponse(DatacenterAddressResponse): + """Datacenter address for given storage location. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param datacenter_address_type: Required. Data center address type.Constant filled by server. + Possible values include: "DatacenterAddressLocation", "DatacenterAddressInstruction". + :type datacenter_address_type: str or ~data_box_management_client.models.DatacenterAddressType + :ivar supported_carriers_for_return_shipment: List of supported carriers for return shipment. + :vartype supported_carriers_for_return_shipment: list[str] + :ivar contact_person_name: Contact person name. + :vartype contact_person_name: str + :ivar company: Company name. + :vartype company: str + :ivar street1: Street address line 1. + :vartype street1: str + :ivar street2: Street address line 2. + :vartype street2: str + :ivar street3: Street address line 3. + :vartype street3: str + :ivar city: City name. + :vartype city: str + :ivar state: name of the state. + :vartype state: str + :ivar zip: Zip code. + :vartype zip: str + :ivar country: name of the country. + :vartype country: str + :ivar phone: Phone number. + :vartype phone: str + :ivar phone_extension: Phone extension. + :vartype phone_extension: str + :ivar address_type: Address type. + :vartype address_type: str + :ivar additional_shipping_information: Special instruction for shipping. 
+ :vartype additional_shipping_information: str + """ + + _validation = { + 'datacenter_address_type': {'required': True}, + 'supported_carriers_for_return_shipment': {'readonly': True}, + 'contact_person_name': {'readonly': True}, + 'company': {'readonly': True}, + 'street1': {'readonly': True}, + 'street2': {'readonly': True}, + 'street3': {'readonly': True}, + 'city': {'readonly': True}, + 'state': {'readonly': True}, + 'zip': {'readonly': True}, + 'country': {'readonly': True}, + 'phone': {'readonly': True}, + 'phone_extension': {'readonly': True}, + 'address_type': {'readonly': True}, + 'additional_shipping_information': {'readonly': True}, + } + + _attribute_map = { + 'datacenter_address_type': {'key': 'datacenterAddressType', 'type': 'str'}, + 'supported_carriers_for_return_shipment': {'key': 'supportedCarriersForReturnShipment', 'type': '[str]'}, + 'contact_person_name': {'key': 'contactPersonName', 'type': 'str'}, + 'company': {'key': 'company', 'type': 'str'}, + 'street1': {'key': 'street1', 'type': 'str'}, + 'street2': {'key': 'street2', 'type': 'str'}, + 'street3': {'key': 'street3', 'type': 'str'}, + 'city': {'key': 'city', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'zip': {'key': 'zip', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, + 'phone': {'key': 'phone', 'type': 'str'}, + 'phone_extension': {'key': 'phoneExtension', 'type': 'str'}, + 'address_type': {'key': 'addressType', 'type': 'str'}, + 'additional_shipping_information': {'key': 'additionalShippingInformation', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DatacenterAddressLocationResponse, self).__init__(**kwargs) + self.datacenter_address_type = 'DatacenterAddressLocation' # type: str + self.contact_person_name = None + self.company = None + self.street1 = None + self.street2 = None + self.street3 = None + self.city = None + self.state = None + self.zip = None + self.country = None + self.phone = None + self.phone_extension = None + self.address_type = None + self.additional_shipping_information = None + + +class DatacenterAddressRequest(msrest.serialization.Model): + """Request body to get the datacenter address. + + All required parameters must be populated in order to send to Azure. + + :param storage_location: Required. Storage location. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. + :type storage_location: str + :param sku_name: Required. Sku Name for which the data center address requested. Possible + values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", "DataBoxCustomerDisk". + :type sku_name: str or ~data_box_management_client.models.SkuName + """ + + _validation = { + 'storage_location': {'required': True}, + 'sku_name': {'required': True}, + } + + _attribute_map = { + 'storage_location': {'key': 'storageLocation', 'type': 'str'}, + 'sku_name': {'key': 'skuName', 'type': 'str'}, + } + + def __init__( + self, + *, + storage_location: str, + sku_name: Union[str, "SkuName"], + **kwargs + ): + super(DatacenterAddressRequest, self).__init__(**kwargs) + self.storage_location = storage_location + self.sku_name = sku_name + + +class DataExportDetails(msrest.serialization.Model): + """Details of the data to be used for exporting data from azure. + + All required parameters must be populated in order to send to Azure. + + :param transfer_configuration: Required. Configuration for the data transfer. 
+ :type transfer_configuration: ~data_box_management_client.models.TransferConfiguration + :param log_collection_level: Level of the logs to be collected. Possible values include: + "Error", "Verbose". Default value: "Error". + :type log_collection_level: str or ~data_box_management_client.models.LogCollectionLevel + :param account_details: Required. Account details of the data to be transferred. + :type account_details: ~data_box_management_client.models.DataAccountDetails + """ + + _validation = { + 'transfer_configuration': {'required': True}, + 'account_details': {'required': True}, + } + + _attribute_map = { + 'transfer_configuration': {'key': 'transferConfiguration', 'type': 'TransferConfiguration'}, + 'log_collection_level': {'key': 'logCollectionLevel', 'type': 'str'}, + 'account_details': {'key': 'accountDetails', 'type': 'DataAccountDetails'}, + } + + def __init__( + self, + *, + transfer_configuration: "TransferConfiguration", + account_details: "DataAccountDetails", + log_collection_level: Optional[Union[str, "LogCollectionLevel"]] = "Error", + **kwargs + ): + super(DataExportDetails, self).__init__(**kwargs) + self.transfer_configuration = transfer_configuration + self.log_collection_level = log_collection_level + self.account_details = account_details + + +class DataImportDetails(msrest.serialization.Model): + """Details of the data to be used for importing data to azure. + + All required parameters must be populated in order to send to Azure. + + :param account_details: Required. Account details of the data to be transferred. + :type account_details: ~data_box_management_client.models.DataAccountDetails + """ + + _validation = { + 'account_details': {'required': True}, + } + + _attribute_map = { + 'account_details': {'key': 'accountDetails', 'type': 'DataAccountDetails'}, + } + + def __init__( + self, + *, + account_details: "DataAccountDetails", + **kwargs + ): + super(DataImportDetails, self).__init__(**kwargs) + self.account_details = account_details + + +class DataLocationToServiceLocationMap(msrest.serialization.Model): + """Map of data location to service location. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar data_location: Location of the data. + :vartype data_location: str + :ivar service_location: Location of the service. + :vartype service_location: str + """ + + _validation = { + 'data_location': {'readonly': True}, + 'service_location': {'readonly': True}, + } + + _attribute_map = { + 'data_location': {'key': 'dataLocation', 'type': 'str'}, + 'service_location': {'key': 'serviceLocation', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DataLocationToServiceLocationMap, self).__init__(**kwargs) + self.data_location = None + self.service_location = None + + +class DataTransferDetailsValidationRequest(ValidationInputRequest): + """Request to validate export and import data details. + + All required parameters must be populated in order to send to Azure. + + :param validation_type: Required. Identifies the type of validation request.Constant filled by + server. Possible values include: "ValidateAddress", + "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + :param data_export_details: List of DataTransfer details to be used to export data from azure. 
+ :type data_export_details: list[~data_box_management_client.models.DataExportDetails] + :param data_import_details: List of DataTransfer details to be used to import data to azure. + :type data_import_details: list[~data_box_management_client.models.DataImportDetails] + :param device_type: Required. Device type. Possible values include: "DataBox", "DataBoxDisk", + "DataBoxHeavy", "DataBoxCustomerDisk". + :type device_type: str or ~data_box_management_client.models.SkuName + :param transfer_type: Required. Type of the transfer. Possible values include: "ImportToAzure", + "ExportFromAzure". + :type transfer_type: str or ~data_box_management_client.models.TransferType + """ + + _validation = { + 'validation_type': {'required': True}, + 'device_type': {'required': True}, + 'transfer_type': {'required': True}, + } + + _attribute_map = { + 'validation_type': {'key': 'validationType', 'type': 'str'}, + 'data_export_details': {'key': 'dataExportDetails', 'type': '[DataExportDetails]'}, + 'data_import_details': {'key': 'dataImportDetails', 'type': '[DataImportDetails]'}, + 'device_type': {'key': 'deviceType', 'type': 'str'}, + 'transfer_type': {'key': 'transferType', 'type': 'str'}, + } + + def __init__( + self, + *, + device_type: Union[str, "SkuName"], + transfer_type: Union[str, "TransferType"], + data_export_details: Optional[List["DataExportDetails"]] = None, + data_import_details: Optional[List["DataImportDetails"]] = None, + **kwargs + ): + super(DataTransferDetailsValidationRequest, self).__init__(**kwargs) + self.validation_type = 'ValidateDataTransferDetails' # type: str + self.data_export_details = data_export_details + self.data_import_details = data_import_details + self.device_type = device_type + self.transfer_type = transfer_type + + +class DataTransferDetailsValidationResponseProperties(ValidationInputResponse): + """Properties of data transfer details validation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param validation_type: Required. Identifies the type of validation response.Constant filled by + server. Possible values include: "ValidateAddress", + "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + :ivar error: Error code and message of validation response. + :vartype error: ~data_box_management_client.models.CloudError + :ivar status: Data transfer details validation status. Possible values include: "Valid", + "Invalid", "Skipped". + :vartype status: str or ~data_box_management_client.models.ValidationStatus + """ + + _validation = { + 'validation_type': {'required': True}, + 'error': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'validation_type': {'key': 'validationType', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'CloudError'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DataTransferDetailsValidationResponseProperties, self).__init__(**kwargs) + self.validation_type = 'ValidateDataTransferDetails' # type: str + self.status = None + + +class DcAccessSecurityCode(msrest.serialization.Model): + """Dc access security code. + + :param reverse_dc_access_code: Reverse Dc access security code. 
+ :type reverse_dc_access_code: str + :param forward_dc_access_code: Forward Dc access security code. + :type forward_dc_access_code: str + """ + + _attribute_map = { + 'reverse_dc_access_code': {'key': 'reverseDCAccessCode', 'type': 'str'}, + 'forward_dc_access_code': {'key': 'forwardDCAccessCode', 'type': 'str'}, + } + + def __init__( + self, + *, + reverse_dc_access_code: Optional[str] = None, + forward_dc_access_code: Optional[str] = None, + **kwargs + ): + super(DcAccessSecurityCode, self).__init__(**kwargs) + self.reverse_dc_access_code = reverse_dc_access_code + self.forward_dc_access_code = forward_dc_access_code + + +class Details(msrest.serialization.Model): + """Details. + + All required parameters must be populated in order to send to Azure. + + :param code: Required. + :type code: str + :param message: Required. + :type message: str + """ + + _validation = { + 'code': {'required': True}, + 'message': {'required': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__( + self, + *, + code: str, + message: str, + **kwargs + ): + super(Details, self).__init__(**kwargs) + self.code = code + self.message = message + + +class DiskScheduleAvailabilityRequest(ScheduleAvailabilityRequest): + """Request body to get the availability for scheduling disk orders. + + All required parameters must be populated in order to send to Azure. + + :param storage_location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. + :type storage_location: str + :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by + server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type sku_name: str or ~data_box_management_client.models.SkuName + :param country: Country in which storage location should be supported. + :type country: str + :param expected_data_size_in_tera_bytes: Required. The expected size of the data, which needs + to be transferred in this job, in terabytes. + :type expected_data_size_in_tera_bytes: int + """ + + _validation = { + 'storage_location': {'required': True}, + 'sku_name': {'required': True}, + 'expected_data_size_in_tera_bytes': {'required': True}, + } + + _attribute_map = { + 'storage_location': {'key': 'storageLocation', 'type': 'str'}, + 'sku_name': {'key': 'skuName', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, + 'expected_data_size_in_tera_bytes': {'key': 'expectedDataSizeInTeraBytes', 'type': 'int'}, + } + + def __init__( + self, + *, + storage_location: str, + expected_data_size_in_tera_bytes: int, + country: Optional[str] = None, + **kwargs + ): + super(DiskScheduleAvailabilityRequest, self).__init__(storage_location=storage_location, country=country, **kwargs) + self.sku_name = 'DataBoxDisk' # type: str + self.expected_data_size_in_tera_bytes = expected_data_size_in_tera_bytes + + +class DiskSecret(msrest.serialization.Model): + """Contains all the secrets of a Disk. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar disk_serial_number: Serial number of the assigned disk. + :vartype disk_serial_number: str + :ivar bit_locker_key: Bit Locker key of the disk which can be used to unlock the disk to copy + data. 
+ :vartype bit_locker_key: str + """ + + _validation = { + 'disk_serial_number': {'readonly': True}, + 'bit_locker_key': {'readonly': True}, + } + + _attribute_map = { + 'disk_serial_number': {'key': 'diskSerialNumber', 'type': 'str'}, + 'bit_locker_key': {'key': 'bitLockerKey', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DiskSecret, self).__init__(**kwargs) + self.disk_serial_number = None + self.bit_locker_key = None + + +class EncryptionPreferences(msrest.serialization.Model): + """Preferences related to the Encryption. + + :param double_encryption: Defines secondary layer of software-based encryption enablement. + Possible values include: "Enabled", "Disabled". Default value: "Disabled". + :type double_encryption: str or ~data_box_management_client.models.DoubleEncryption + """ + + _attribute_map = { + 'double_encryption': {'key': 'doubleEncryption', 'type': 'str'}, + } + + def __init__( + self, + *, + double_encryption: Optional[Union[str, "DoubleEncryption"]] = "Disabled", + **kwargs + ): + super(EncryptionPreferences, self).__init__(**kwargs) + self.double_encryption = double_encryption + + +class ErrorDetail(msrest.serialization.Model): + """ErrorDetail. + + All required parameters must be populated in order to send to Azure. + + :param code: Required. + :type code: str + :param message: Required. + :type message: str + :param details: + :type details: list[~data_box_management_client.models.Details] + :param target: + :type target: str + """ + + _validation = { + 'code': {'required': True}, + 'message': {'required': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[Details]'}, + 'target': {'key': 'target', 'type': 'str'}, + } + + def __init__( + self, + *, + code: str, + message: str, + details: Optional[List["Details"]] = None, + target: Optional[str] = None, + **kwargs + ): + super(ErrorDetail, self).__init__(**kwargs) + self.code = code + self.message = message + self.details = details + self.target = target + + +class ExportDiskDetails(msrest.serialization.Model): + """Export disk details. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar manifest_file: Manifest file of drive. + :vartype manifest_file: str + :ivar manifest_hash: Manifest file of drive. + :vartype manifest_hash: str + :ivar disk_hash: Hash of the disk. + :vartype disk_hash: str + """ + + _validation = { + 'manifest_file': {'readonly': True}, + 'manifest_hash': {'readonly': True}, + 'disk_hash': {'readonly': True}, + } + + _attribute_map = { + 'manifest_file': {'key': 'manifestFile', 'type': 'str'}, + 'manifest_hash': {'key': 'manifestHash', 'type': 'str'}, + 'disk_hash': {'key': 'diskHash', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ExportDiskDetails, self).__init__(**kwargs) + self.manifest_file = None + self.manifest_hash = None + self.disk_hash = None + + +class FilterFileDetails(msrest.serialization.Model): + """Details of the filter files to be used for data transfer. + + All required parameters must be populated in order to send to Azure. + + :param filter_file_type: Required. Type of the filter file. Possible values include: + "AzureBlob", "AzureFile". + :type filter_file_type: str or ~data_box_management_client.models.FilterFileType + :param filter_file_path: Required. Path of the file that contains the details of all items to + transfer. 
+ :type filter_file_path: str + """ + + _validation = { + 'filter_file_type': {'required': True}, + 'filter_file_path': {'required': True}, + } + + _attribute_map = { + 'filter_file_type': {'key': 'filterFileType', 'type': 'str'}, + 'filter_file_path': {'key': 'filterFilePath', 'type': 'str'}, + } + + def __init__( + self, + *, + filter_file_type: Union[str, "FilterFileType"], + filter_file_path: str, + **kwargs + ): + super(FilterFileDetails, self).__init__(**kwargs) + self.filter_file_type = filter_file_type + self.filter_file_path = filter_file_path + + +class HeavyScheduleAvailabilityRequest(ScheduleAvailabilityRequest): + """Request body to get the availability for scheduling heavy orders. + + All required parameters must be populated in order to send to Azure. + + :param storage_location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. + :type storage_location: str + :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by + server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy", + "DataBoxCustomerDisk". + :type sku_name: str or ~data_box_management_client.models.SkuName + :param country: Country in which storage location should be supported. + :type country: str + """ + + _validation = { + 'storage_location': {'required': True}, + 'sku_name': {'required': True}, + } + + _attribute_map = { + 'storage_location': {'key': 'storageLocation', 'type': 'str'}, + 'sku_name': {'key': 'skuName', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, + } + + def __init__( + self, + *, + storage_location: str, + country: Optional[str] = None, + **kwargs + ): + super(HeavyScheduleAvailabilityRequest, self).__init__(storage_location=storage_location, country=country, **kwargs) + self.sku_name = 'DataBoxHeavy' # type: str + + +class IdentityProperties(msrest.serialization.Model): + """Managed identity properties. + + :param type: Managed service identity type. + :type type: str + :param user_assigned: User assigned identity properties. + :type user_assigned: ~data_box_management_client.models.UserAssignedProperties + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'user_assigned': {'key': 'userAssigned', 'type': 'UserAssignedProperties'}, + } + + def __init__( + self, + *, + type: Optional[str] = None, + user_assigned: Optional["UserAssignedProperties"] = None, + **kwargs + ): + super(IdentityProperties, self).__init__(**kwargs) + self.type = type + self.user_assigned = user_assigned + + +class ImportDiskDetails(msrest.serialization.Model): + """Import disk details. + + All required parameters must be populated in order to send to Azure. + + :param manifest_file: Required. Manifest file of drive. + :type manifest_file: str + :param manifest_hash: Required. Manifest file of drive. + :type manifest_hash: str + :param bit_locker_key: Required. BitLocker key of drive. + :type bit_locker_key: str + :param disk_hash: Required. Hash of the disk. 
+ :type disk_hash: str + """ + + _validation = { + 'manifest_file': {'required': True}, + 'manifest_hash': {'required': True}, + 'bit_locker_key': {'required': True}, + 'disk_hash': {'required': True}, + } + + _attribute_map = { + 'manifest_file': {'key': 'manifestFile', 'type': 'str'}, + 'manifest_hash': {'key': 'manifestHash', 'type': 'str'}, + 'bit_locker_key': {'key': 'bitLockerKey', 'type': 'str'}, + 'disk_hash': {'key': 'diskHash', 'type': 'str'}, + } + + def __init__( + self, + *, + manifest_file: str, + manifest_hash: str, + bit_locker_key: str, + disk_hash: str, + **kwargs + ): + super(ImportDiskDetails, self).__init__(**kwargs) + self.manifest_file = manifest_file + self.manifest_hash = manifest_hash + self.bit_locker_key = bit_locker_key + self.disk_hash = disk_hash + + +class JobDeliveryInfo(msrest.serialization.Model): + """Additional delivery info. + + :param scheduled_date_time: Scheduled date time. + :type scheduled_date_time: ~datetime.datetime + """ + + _attribute_map = { + 'scheduled_date_time': {'key': 'scheduledDateTime', 'type': 'iso-8601'}, + } + + def __init__( + self, + *, + scheduled_date_time: Optional[datetime.datetime] = None, + **kwargs + ): + super(JobDeliveryInfo, self).__init__(**kwargs) + self.scheduled_date_time = scheduled_date_time + + +class Resource(msrest.serialization.Model): + """Model of the Resource. + + All required parameters must be populated in order to send to Azure. + + :param location: Required. The location of the resource. This will be one of the supported and + registered Azure Regions (e.g. West US, East US, Southeast Asia, etc.). The region of a + resource cannot be changed once it is created, but if an identical region is specified on + update the request will succeed. + :type location: str + :param tags: A set of tags. The list of key value pairs that describe the resource. These tags + can be used in viewing and grouping this resource (across resource groups). + :type tags: dict[str, str] + :param sku: Required. The sku type. + :type sku: ~data_box_management_client.models.Sku + :param identity: Msi identity of the resource. + :type identity: ~data_box_management_client.models.ResourceIdentity + """ + + _validation = { + 'location': {'required': True}, + 'sku': {'required': True}, + } + + _attribute_map = { + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, + } + + def __init__( + self, + *, + location: str, + sku: "Sku", + tags: Optional[Dict[str, str]] = None, + identity: Optional["ResourceIdentity"] = None, + **kwargs + ): + super(Resource, self).__init__(**kwargs) + self.location = location + self.tags = tags + self.sku = sku + self.identity = identity + + +class JobResource(Resource): + """Job Resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param location: Required. The location of the resource. This will be one of the supported and + registered Azure Regions (e.g. West US, East US, Southeast Asia, etc.). The region of a + resource cannot be changed once it is created, but if an identical region is specified on + update the request will succeed. + :type location: str + :param tags: A set of tags. The list of key value pairs that describe the resource. These tags + can be used in viewing and grouping this resource (across resource groups). 
+ :type tags: dict[str, str] + :param sku: Required. The sku type. + :type sku: ~data_box_management_client.models.Sku + :param identity: Msi identity of the resource. + :type identity: ~data_box_management_client.models.ResourceIdentity + :ivar name: Name of the object. + :vartype name: str + :ivar id: Id of the object. + :vartype id: str + :ivar type: Type of the object. + :vartype type: str + :ivar system_data: Metadata pertaining to creation and last modification of the resource. + :vartype system_data: ~data_box_management_client.models.SystemData + :param transfer_type: Required. Type of the data transfer. Possible values include: + "ImportToAzure", "ExportFromAzure". + :type transfer_type: str or ~data_box_management_client.models.TransferType + :ivar is_cancellable: Describes whether the job is cancellable or not. + :vartype is_cancellable: bool + :ivar is_deletable: Describes whether the job is deletable or not. + :vartype is_deletable: bool + :ivar is_shipping_address_editable: Describes whether the shipping address is editable or not. + :vartype is_shipping_address_editable: bool + :ivar is_prepare_to_ship_enabled: Is Prepare To Ship Enabled on this job. + :vartype is_prepare_to_ship_enabled: bool + :ivar status: Name of the stage which is in progress. Possible values include: "DeviceOrdered", + "DevicePrepared", "Dispatched", "Delivered", "PickedUp", "AtAzureDC", "DataCopy", "Completed", + "CompletedWithErrors", "Cancelled", "Failed_IssueReportedAtCustomer", + "Failed_IssueDetectedAtAzureDC", "Aborted", "CompletedWithWarnings", + "ReadyToDispatchFromAzureDC", "ReadyToReceiveAtAzureDC", "Created", "Shipping", "Packaging". + :vartype status: str or ~data_box_management_client.models.StageName + :ivar start_time: Time at which the job was started in UTC ISO 8601 format. + :vartype start_time: ~datetime.datetime + :ivar error: Top level error for the job. + :vartype error: ~data_box_management_client.models.CloudError + :param details: Details of a job run. This field will only be sent for expand details filter. + :type details: ~data_box_management_client.models.JobDetails + :ivar cancellation_reason: Reason for cancellation. + :vartype cancellation_reason: str + :param delivery_type: Delivery type of Job. Possible values include: "NonScheduled", + "Scheduled". Default value: "NonScheduled". + :type delivery_type: str or ~data_box_management_client.models.JobDeliveryType + :param delivery_info: Delivery Info of Job. + :type delivery_info: ~data_box_management_client.models.JobDeliveryInfo + :ivar is_cancellable_without_fee: Flag to indicate cancellation of scheduled job. 
+ :vartype is_cancellable_without_fee: bool + """ + + _validation = { + 'location': {'required': True}, + 'sku': {'required': True}, + 'name': {'readonly': True}, + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'system_data': {'readonly': True}, + 'transfer_type': {'required': True}, + 'is_cancellable': {'readonly': True}, + 'is_deletable': {'readonly': True}, + 'is_shipping_address_editable': {'readonly': True}, + 'is_prepare_to_ship_enabled': {'readonly': True}, + 'status': {'readonly': True}, + 'start_time': {'readonly': True}, + 'error': {'readonly': True}, + 'cancellation_reason': {'readonly': True}, + 'is_cancellable_without_fee': {'readonly': True}, + } + + _attribute_map = { + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, + 'name': {'key': 'name', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'transfer_type': {'key': 'properties.transferType', 'type': 'str'}, + 'is_cancellable': {'key': 'properties.isCancellable', 'type': 'bool'}, + 'is_deletable': {'key': 'properties.isDeletable', 'type': 'bool'}, + 'is_shipping_address_editable': {'key': 'properties.isShippingAddressEditable', 'type': 'bool'}, + 'is_prepare_to_ship_enabled': {'key': 'properties.isPrepareToShipEnabled', 'type': 'bool'}, + 'status': {'key': 'properties.status', 'type': 'str'}, + 'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'}, + 'error': {'key': 'properties.error', 'type': 'CloudError'}, + 'details': {'key': 'properties.details', 'type': 'JobDetails'}, + 'cancellation_reason': {'key': 'properties.cancellationReason', 'type': 'str'}, + 'delivery_type': {'key': 'properties.deliveryType', 'type': 'str'}, + 'delivery_info': {'key': 'properties.deliveryInfo', 'type': 'JobDeliveryInfo'}, + 'is_cancellable_without_fee': {'key': 'properties.isCancellableWithoutFee', 'type': 'bool'}, + } + + def __init__( + self, + *, + location: str, + sku: "Sku", + transfer_type: Union[str, "TransferType"], + tags: Optional[Dict[str, str]] = None, + identity: Optional["ResourceIdentity"] = None, + details: Optional["JobDetails"] = None, + delivery_type: Optional[Union[str, "JobDeliveryType"]] = "NonScheduled", + delivery_info: Optional["JobDeliveryInfo"] = None, + **kwargs + ): + super(JobResource, self).__init__(location=location, tags=tags, sku=sku, identity=identity, **kwargs) + self.name = None + self.id = None + self.type = None + self.system_data = None + self.transfer_type = transfer_type + self.is_cancellable = None + self.is_deletable = None + self.is_shipping_address_editable = None + self.is_prepare_to_ship_enabled = None + self.status = None + self.start_time = None + self.error = None + self.details = details + self.cancellation_reason = None + self.delivery_type = delivery_type + self.delivery_info = delivery_info + self.is_cancellable_without_fee = None + + +class JobResourceList(msrest.serialization.Model): + """Job Resource Collection. + + :param value: List of job resources. + :type value: list[~data_box_management_client.models.JobResource] + :param next_link: Link for the next set of job resources. 
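# ---------------------------------------------------------------------------
# [Editor's illustrative sketch - not part of the generated diff above]
# Assembling the JobResource request body defined above for an import order.
# Only 'location', 'sku' and 'transfer_type' are required by the constructor;
# 'Sku' and 'ContactDetails' are sibling models whose keyword arguments shown
# here are assumptions, as is the import path.
from data_box_management_client import models  # assumed import path for these generated models

job = models.JobResource(
    location="westus",
    transfer_type="ImportToAzure",
    sku=models.Sku(name="DataBox"),              # assumed Sku signature
    details=models.DataBoxJobDetails(
        contact_details=models.ContactDetails(   # assumed ContactDetails signature
            contact_name="Public SDK Test",
            phone="1234567890",
            email_list=["testing@microsoft.com"],
        ),
    ),
)
# Read-only fields (status, is_cancellable, error, ...) stay None until the
# service populates them in a response.
# ---------------------------------------------------------------------------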
+ :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[JobResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["JobResource"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + super(JobResourceList, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class JobResourceUpdateParameter(msrest.serialization.Model): + """The JobResourceUpdateParameter. + + :param tags: A set of tags. The list of key value pairs that describe the resource. These tags + can be used in viewing and grouping this resource (across resource groups). + :type tags: dict[str, str] + :param identity: Msi identity of the resource. + :type identity: ~data_box_management_client.models.ResourceIdentity + :param details: Details of a job to be updated. + :type details: ~data_box_management_client.models.UpdateJobDetails + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, + 'details': {'key': 'properties.details', 'type': 'UpdateJobDetails'}, + } + + def __init__( + self, + *, + tags: Optional[Dict[str, str]] = None, + identity: Optional["ResourceIdentity"] = None, + details: Optional["UpdateJobDetails"] = None, + **kwargs + ): + super(JobResourceUpdateParameter, self).__init__(**kwargs) + self.tags = tags + self.identity = identity + self.details = details + + +class JobStages(msrest.serialization.Model): + """Job stages. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar stage_name: Name of the job stage. Possible values include: "DeviceOrdered", + "DevicePrepared", "Dispatched", "Delivered", "PickedUp", "AtAzureDC", "DataCopy", "Completed", + "CompletedWithErrors", "Cancelled", "Failed_IssueReportedAtCustomer", + "Failed_IssueDetectedAtAzureDC", "Aborted", "CompletedWithWarnings", + "ReadyToDispatchFromAzureDC", "ReadyToReceiveAtAzureDC", "Created", "Shipping", "Packaging". + :vartype stage_name: str or ~data_box_management_client.models.StageName + :ivar display_name: Display name of the job stage. + :vartype display_name: str + :ivar stage_status: Status of the job stage. Possible values include: "None", "InProgress", + "Succeeded", "Failed", "Cancelled", "Cancelling", "SucceededWithErrors", + "WaitingForCustomerAction", "SucceededWithWarnings", "WaitingForCustomerActionForKek", + "WaitingForCustomerActionForCleanUp", "CustomerActionPerformedForCleanUp". + :vartype stage_status: str or ~data_box_management_client.models.StageStatus + :ivar stage_time: Time for the job stage in UTC ISO 8601 format. + :vartype stage_time: ~datetime.datetime + :ivar job_stage_details: Job Stage Details. 
+ :vartype job_stage_details: object + """ + + _validation = { + 'stage_name': {'readonly': True}, + 'display_name': {'readonly': True}, + 'stage_status': {'readonly': True}, + 'stage_time': {'readonly': True}, + 'job_stage_details': {'readonly': True}, + } + + _attribute_map = { + 'stage_name': {'key': 'stageName', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'stage_status': {'key': 'stageStatus', 'type': 'str'}, + 'stage_time': {'key': 'stageTime', 'type': 'iso-8601'}, + 'job_stage_details': {'key': 'jobStageDetails', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(JobStages, self).__init__(**kwargs) + self.stage_name = None + self.display_name = None + self.stage_status = None + self.stage_time = None + self.job_stage_details = None + + +class KeyEncryptionKey(msrest.serialization.Model): + """Encryption key containing details about key to encrypt different keys. + + All required parameters must be populated in order to send to Azure. + + :param kek_type: Required. Type of encryption key used for key encryption. Possible values + include: "MicrosoftManaged", "CustomerManaged". Default value: "MicrosoftManaged". + :type kek_type: str or ~data_box_management_client.models.KekType + :param identity_properties: Managed identity properties used for key encryption. + :type identity_properties: ~data_box_management_client.models.IdentityProperties + :param kek_url: Key encryption key. It is required in case of Customer managed KekType. + :type kek_url: str + :param kek_vault_resource_id: Kek vault resource id. It is required in case of Customer managed + KekType. + :type kek_vault_resource_id: str + """ + + _validation = { + 'kek_type': {'required': True}, + } + + _attribute_map = { + 'kek_type': {'key': 'kekType', 'type': 'str'}, + 'identity_properties': {'key': 'identityProperties', 'type': 'IdentityProperties'}, + 'kek_url': {'key': 'kekUrl', 'type': 'str'}, + 'kek_vault_resource_id': {'key': 'kekVaultResourceID', 'type': 'str'}, + } + + def __init__( + self, + *, + kek_type: Union[str, "KekType"] = "MicrosoftManaged", + identity_properties: Optional["IdentityProperties"] = None, + kek_url: Optional[str] = None, + kek_vault_resource_id: Optional[str] = None, + **kwargs + ): + super(KeyEncryptionKey, self).__init__(**kwargs) + self.kek_type = kek_type + self.identity_properties = identity_properties + self.kek_url = kek_url + self.kek_vault_resource_id = kek_vault_resource_id + + +class LastMitigationActionOnJob(msrest.serialization.Model): + """Last Mitigation Action Performed On Job. + + :param action_date_time_in_utc: Action performed date time. + :type action_date_time_in_utc: ~datetime.datetime + :param is_performed_by_customer: Action performed by customer, + possibility is that mitigation might happen by customer or service or by ops. + :type is_performed_by_customer: bool + :param customer_resolution: Resolution code provided by customer. Possible values include: + "None", "MoveToCleanUpDevice", "Resume". 
+ :type customer_resolution: str or ~data_box_management_client.models.CustomerResolutionCode + """ + + _attribute_map = { + 'action_date_time_in_utc': {'key': 'actionDateTimeInUtc', 'type': 'iso-8601'}, + 'is_performed_by_customer': {'key': 'isPerformedByCustomer', 'type': 'bool'}, + 'customer_resolution': {'key': 'customerResolution', 'type': 'str'}, + } + + def __init__( + self, + *, + action_date_time_in_utc: Optional[datetime.datetime] = None, + is_performed_by_customer: Optional[bool] = None, + customer_resolution: Optional[Union[str, "CustomerResolutionCode"]] = None, + **kwargs + ): + super(LastMitigationActionOnJob, self).__init__(**kwargs) + self.action_date_time_in_utc = action_date_time_in_utc + self.is_performed_by_customer = is_performed_by_customer + self.customer_resolution = customer_resolution + + +class ManagedDiskDetails(DataAccountDetails): + """Details of the managed disks. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Account Type of the data to be transferred.Constant filled + by server. Possible values include: "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param share_password: Password for all the shares to be created on the device. Should not be + passed for TransferType:ExportFromAzure jobs. If this is not passed, the service will generate + password itself. This will not be returned in Get Call. Password Requirements : Password must + be minimum of 12 and maximum of 64 characters. Password must have at least one uppercase + alphabet, one number and one special character. Password cannot have the following characters : + IilLoO0 Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. + :type share_password: str + :param resource_group_id: Required. Resource Group Id of the compute disks. + :type resource_group_id: str + :param staging_storage_account_id: Required. Resource Id of the storage account that can be + used to copy the vhd for staging. + :type staging_storage_account_id: str + """ + + _validation = { + 'data_account_type': {'required': True}, + 'resource_group_id': {'required': True}, + 'staging_storage_account_id': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'share_password': {'key': 'sharePassword', 'type': 'str'}, + 'resource_group_id': {'key': 'resourceGroupId', 'type': 'str'}, + 'staging_storage_account_id': {'key': 'stagingStorageAccountId', 'type': 'str'}, + } + + def __init__( + self, + *, + resource_group_id: str, + staging_storage_account_id: str, + share_password: Optional[str] = None, + **kwargs + ): + super(ManagedDiskDetails, self).__init__(share_password=share_password, **kwargs) + self.data_account_type = 'ManagedDisk' # type: str + self.resource_group_id = resource_group_id + self.staging_storage_account_id = staging_storage_account_id + + +class MarkDevicesShippedRequest(msrest.serialization.Model): + """The request body to provide the delivery package details of job. + + All required parameters must be populated in order to send to Azure. + + :param delivery_package_details: Required. Delivery package details. 
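# ---------------------------------------------------------------------------
# [Editor's illustrative sketch - not part of the generated diff above]
# Using the ManagedDiskDetails destination defined above: both resource ids are
# required, share_password is optional, and the 'data_account_type'
# discriminator is pinned to 'ManagedDisk' by the subclass. The resource ids
# are placeholders and the import path is an assumption.
from data_box_management_client import models  # assumed import path for these generated models

disks = models.ManagedDiskDetails(
    resource_group_id="/subscriptions/<sub>/resourceGroups/<disk-rg>",
    staging_storage_account_id=(
        "/subscriptions/<sub>/resourceGroups/<rg>"
        "/providers/Microsoft.Storage/storageAccounts/<staging-sa>"
    ),
)
import_details = models.DataImportDetails(account_details=disks)  # DataImportDetails is defined earlier above
assert disks.data_account_type == "ManagedDisk"
# ---------------------------------------------------------------------------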
+ :type delivery_package_details: ~data_box_management_client.models.PackageCarrierInfo + """ + + _validation = { + 'delivery_package_details': {'required': True}, + } + + _attribute_map = { + 'delivery_package_details': {'key': 'deliveryPackageDetails', 'type': 'PackageCarrierInfo'}, + } + + def __init__( + self, + *, + delivery_package_details: "PackageCarrierInfo", + **kwargs + ): + super(MarkDevicesShippedRequest, self).__init__(**kwargs) + self.delivery_package_details = delivery_package_details + + +class MitigateJobRequest(msrest.serialization.Model): + """The Mitigate Job captured from request body for Mitigate API. + + All required parameters must be populated in order to send to Azure. + + :param customer_resolution_code: Required. Resolution code for the job. Possible values + include: "None", "MoveToCleanUpDevice", "Resume". + :type customer_resolution_code: str or + ~data_box_management_client.models.CustomerResolutionCode + """ + + _validation = { + 'customer_resolution_code': {'required': True}, + } + + _attribute_map = { + 'customer_resolution_code': {'key': 'customerResolutionCode', 'type': 'str'}, + } + + def __init__( + self, + *, + customer_resolution_code: Union[str, "CustomerResolutionCode"], + **kwargs + ): + super(MitigateJobRequest, self).__init__(**kwargs) + self.customer_resolution_code = customer_resolution_code + + +class NotificationPreference(msrest.serialization.Model): + """Notification preference for a job stage. + + All required parameters must be populated in order to send to Azure. + + :param stage_name: Required. Name of the stage. Possible values include: "DevicePrepared", + "Dispatched", "Delivered", "PickedUp", "AtAzureDC", "DataCopy". + :type stage_name: str or ~data_box_management_client.models.NotificationStageName + :param send_notification: Required. Notification is required or not. + :type send_notification: bool + """ + + _validation = { + 'stage_name': {'required': True}, + 'send_notification': {'required': True}, + } + + _attribute_map = { + 'stage_name': {'key': 'stageName', 'type': 'str'}, + 'send_notification': {'key': 'sendNotification', 'type': 'bool'}, + } + + def __init__( + self, + *, + stage_name: Union[str, "NotificationStageName"], + send_notification: bool = True, + **kwargs + ): + super(NotificationPreference, self).__init__(**kwargs) + self.stage_name = stage_name + self.send_notification = send_notification + + +class Operation(msrest.serialization.Model): + """Operation entity. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: Name of the operation. Format: + {resourceProviderNamespace}/{resourceType}/{read|write|delete|action}. + :vartype name: str + :ivar display: Operation display values. + :vartype display: ~data_box_management_client.models.OperationDisplay + :ivar properties: Operation properties. + :vartype properties: object + :ivar origin: Origin of the operation. Can be : user|system|user,system. + :vartype origin: str + :param is_data_action: Indicates whether the operation is a data action. 
+ :type is_data_action: bool + """ + + _validation = { + 'name': {'readonly': True}, + 'display': {'readonly': True}, + 'properties': {'readonly': True}, + 'origin': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display': {'key': 'display', 'type': 'OperationDisplay'}, + 'properties': {'key': 'properties', 'type': 'object'}, + 'origin': {'key': 'origin', 'type': 'str'}, + 'is_data_action': {'key': 'isDataAction', 'type': 'bool'}, + } + + def __init__( + self, + *, + is_data_action: Optional[bool] = None, + **kwargs + ): + super(Operation, self).__init__(**kwargs) + self.name = None + self.display = None + self.properties = None + self.origin = None + self.is_data_action = is_data_action + + +class OperationDisplay(msrest.serialization.Model): + """Operation display. + + :param provider: Provider name. + :type provider: str + :param resource: Resource name. + :type resource: str + :param operation: Localized name of the operation for display purpose. + :type operation: str + :param description: Localized description of the operation for display purpose. + :type description: str + """ + + _attribute_map = { + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__( + self, + *, + provider: Optional[str] = None, + resource: Optional[str] = None, + operation: Optional[str] = None, + description: Optional[str] = None, + **kwargs + ): + super(OperationDisplay, self).__init__(**kwargs) + self.provider = provider + self.resource = resource + self.operation = operation + self.description = description + + +class OperationList(msrest.serialization.Model): + """Operation Collection. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of operations. + :vartype value: list[~data_box_management_client.models.Operation] + :param next_link: Link for the next set of operations. + :type next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Operation]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + **kwargs + ): + super(OperationList, self).__init__(**kwargs) + self.value = None + self.next_link = next_link + + +class PackageCarrierDetails(msrest.serialization.Model): + """Package carrier details. + + :param carrier_account_number: Carrier Account Number of customer for customer disk. + :type carrier_account_number: str + :param carrier_name: Name of the carrier. + :type carrier_name: str + :param tracking_id: Tracking Id of shipment. + :type tracking_id: str + """ + + _attribute_map = { + 'carrier_account_number': {'key': 'carrierAccountNumber', 'type': 'str'}, + 'carrier_name': {'key': 'carrierName', 'type': 'str'}, + 'tracking_id': {'key': 'trackingId', 'type': 'str'}, + } + + def __init__( + self, + *, + carrier_account_number: Optional[str] = None, + carrier_name: Optional[str] = None, + tracking_id: Optional[str] = None, + **kwargs + ): + super(PackageCarrierDetails, self).__init__(**kwargs) + self.carrier_account_number = carrier_account_number + self.carrier_name = carrier_name + self.tracking_id = tracking_id + + +class PackageCarrierInfo(msrest.serialization.Model): + """package carrier info. + + :param carrier_name: Name of the carrier. 
+ :type carrier_name: str + :param tracking_id: Tracking Id of shipment. + :type tracking_id: str + """ + + _attribute_map = { + 'carrier_name': {'key': 'carrierName', 'type': 'str'}, + 'tracking_id': {'key': 'trackingId', 'type': 'str'}, + } + + def __init__( + self, + *, + carrier_name: Optional[str] = None, + tracking_id: Optional[str] = None, + **kwargs + ): + super(PackageCarrierInfo, self).__init__(**kwargs) + self.carrier_name = carrier_name + self.tracking_id = tracking_id + + +class PackageShippingDetails(msrest.serialization.Model): + """package shipping details. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar tracking_url: Url where shipment can be tracked. + :vartype tracking_url: str + :param carrier_name: Name of the carrier. + :type carrier_name: str + :param tracking_id: Tracking Id of shipment. + :type tracking_id: str + """ + + _validation = { + 'tracking_url': {'readonly': True}, + } + + _attribute_map = { + 'tracking_url': {'key': 'trackingUrl', 'type': 'str'}, + 'carrier_name': {'key': 'carrierName', 'type': 'str'}, + 'tracking_id': {'key': 'trackingId', 'type': 'str'}, + } + + def __init__( + self, + *, + carrier_name: Optional[str] = None, + tracking_id: Optional[str] = None, + **kwargs + ): + super(PackageShippingDetails, self).__init__(**kwargs) + self.tracking_url = None + self.carrier_name = carrier_name + self.tracking_id = tracking_id + + +class Preferences(msrest.serialization.Model): + """Preferences related to the order. + + :param preferred_data_center_region: Preferred data center region. + :type preferred_data_center_region: list[str] + :param transport_preferences: Preferences related to the shipment logistics of the sku. + :type transport_preferences: ~data_box_management_client.models.TransportPreferences + :param encryption_preferences: Preferences related to the Encryption. + :type encryption_preferences: ~data_box_management_client.models.EncryptionPreferences + """ + + _attribute_map = { + 'preferred_data_center_region': {'key': 'preferredDataCenterRegion', 'type': '[str]'}, + 'transport_preferences': {'key': 'transportPreferences', 'type': 'TransportPreferences'}, + 'encryption_preferences': {'key': 'encryptionPreferences', 'type': 'EncryptionPreferences'}, + } + + def __init__( + self, + *, + preferred_data_center_region: Optional[List[str]] = None, + transport_preferences: Optional["TransportPreferences"] = None, + encryption_preferences: Optional["EncryptionPreferences"] = None, + **kwargs + ): + super(Preferences, self).__init__(**kwargs) + self.preferred_data_center_region = preferred_data_center_region + self.transport_preferences = transport_preferences + self.encryption_preferences = encryption_preferences + + +class PreferencesValidationRequest(ValidationInputRequest): + """Request to validate preference of transport and data center. + + All required parameters must be populated in order to send to Azure. + + :param validation_type: Required. Identifies the type of validation request.Constant filled by + server. Possible values include: "ValidateAddress", + "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + :param preference: Preference of transport and data center. + :type preference: ~data_box_management_client.models.Preferences + :param device_type: Required. 
Device type to be used for the job. Possible values include: + "DataBox", "DataBoxDisk", "DataBoxHeavy", "DataBoxCustomerDisk". + :type device_type: str or ~data_box_management_client.models.SkuName + """ + + _validation = { + 'validation_type': {'required': True}, + 'device_type': {'required': True}, + } + + _attribute_map = { + 'validation_type': {'key': 'validationType', 'type': 'str'}, + 'preference': {'key': 'preference', 'type': 'Preferences'}, + 'device_type': {'key': 'deviceType', 'type': 'str'}, + } + + def __init__( + self, + *, + device_type: Union[str, "SkuName"], + preference: Optional["Preferences"] = None, + **kwargs + ): + super(PreferencesValidationRequest, self).__init__(**kwargs) + self.validation_type = 'ValidatePreferences' # type: str + self.preference = preference + self.device_type = device_type + + +class PreferencesValidationResponseProperties(ValidationInputResponse): + """Properties of data center and transport preference validation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param validation_type: Required. Identifies the type of validation response.Constant filled by + server. Possible values include: "ValidateAddress", + "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + :ivar error: Error code and message of validation response. + :vartype error: ~data_box_management_client.models.CloudError + :ivar status: Validation status of requested data center and transport. Possible values + include: "Valid", "Invalid", "Skipped". + :vartype status: str or ~data_box_management_client.models.ValidationStatus + """ + + _validation = { + 'validation_type': {'required': True}, + 'error': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'validation_type': {'key': 'validationType', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'CloudError'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PreferencesValidationResponseProperties, self).__init__(**kwargs) + self.validation_type = 'ValidatePreferences' # type: str + self.status = None + + +class RegionConfigurationRequest(msrest.serialization.Model): + """Request body to get the configuration for the region. + + :param schedule_availability_request: Request body to get the availability for scheduling + orders. + :type schedule_availability_request: + ~data_box_management_client.models.ScheduleAvailabilityRequest + :param transport_availability_request: Request body to get the transport availability for given + sku. + :type transport_availability_request: + ~data_box_management_client.models.TransportAvailabilityRequest + :param datacenter_address_request: Request body to get the datacenter address . 
+ :type datacenter_address_request: ~data_box_management_client.models.DatacenterAddressRequest + """ + + _attribute_map = { + 'schedule_availability_request': {'key': 'scheduleAvailabilityRequest', 'type': 'ScheduleAvailabilityRequest'}, + 'transport_availability_request': {'key': 'transportAvailabilityRequest', 'type': 'TransportAvailabilityRequest'}, + 'datacenter_address_request': {'key': 'datacenterAddressRequest', 'type': 'DatacenterAddressRequest'}, + } + + def __init__( + self, + *, + schedule_availability_request: Optional["ScheduleAvailabilityRequest"] = None, + transport_availability_request: Optional["TransportAvailabilityRequest"] = None, + datacenter_address_request: Optional["DatacenterAddressRequest"] = None, + **kwargs + ): + super(RegionConfigurationRequest, self).__init__(**kwargs) + self.schedule_availability_request = schedule_availability_request + self.transport_availability_request = transport_availability_request + self.datacenter_address_request = datacenter_address_request + + +class RegionConfigurationResponse(msrest.serialization.Model): + """Configuration response specific to a region. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar schedule_availability_response: Schedule availability for given sku in a region. + :vartype schedule_availability_response: + ~data_box_management_client.models.ScheduleAvailabilityResponse + :ivar transport_availability_response: Transport options available for given sku in a region. + :vartype transport_availability_response: + ~data_box_management_client.models.TransportAvailabilityResponse + :ivar datacenter_address_response: Datacenter address for given sku in a region. + :vartype datacenter_address_response: + ~data_box_management_client.models.DatacenterAddressResponse + """ + + _validation = { + 'schedule_availability_response': {'readonly': True}, + 'transport_availability_response': {'readonly': True}, + 'datacenter_address_response': {'readonly': True}, + } + + _attribute_map = { + 'schedule_availability_response': {'key': 'scheduleAvailabilityResponse', 'type': 'ScheduleAvailabilityResponse'}, + 'transport_availability_response': {'key': 'transportAvailabilityResponse', 'type': 'TransportAvailabilityResponse'}, + 'datacenter_address_response': {'key': 'datacenterAddressResponse', 'type': 'DatacenterAddressResponse'}, + } + + def __init__( + self, + **kwargs + ): + super(RegionConfigurationResponse, self).__init__(**kwargs) + self.schedule_availability_response = None + self.transport_availability_response = None + self.datacenter_address_response = None + + +class ResourceIdentity(msrest.serialization.Model): + """Msi identity details of the resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param type: Identity type. + :type type: str + :ivar principal_id: Service Principal Id backing the Msi. + :vartype principal_id: str + :ivar tenant_id: Home Tenant Id. + :vartype tenant_id: str + :param user_assigned_identities: User Assigned Identities. 
+ :type user_assigned_identities: dict[str, + ~data_box_management_client.models.UserAssignedIdentity] + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'}, + } + + def __init__( + self, + *, + type: Optional[str] = "None", + user_assigned_identities: Optional[Dict[str, "UserAssignedIdentity"]] = None, + **kwargs + ): + super(ResourceIdentity, self).__init__(**kwargs) + self.type = type + self.principal_id = None + self.tenant_id = None + self.user_assigned_identities = user_assigned_identities + + +class ScheduleAvailabilityResponse(msrest.serialization.Model): + """Schedule availability for given sku in a region. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar available_dates: List of dates available to schedule. + :vartype available_dates: list[~datetime.datetime] + """ + + _validation = { + 'available_dates': {'readonly': True}, + } + + _attribute_map = { + 'available_dates': {'key': 'availableDates', 'type': '[iso-8601]'}, + } + + def __init__( + self, + **kwargs + ): + super(ScheduleAvailabilityResponse, self).__init__(**kwargs) + self.available_dates = None + + +class ShareCredentialDetails(msrest.serialization.Model): + """Credential details of the shares in account. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar share_name: Name of the share. + :vartype share_name: str + :ivar share_type: Type of the share. Possible values include: "UnknownType", "HCS", + "BlockBlob", "PageBlob", "AzureFile", "ManagedDisk". + :vartype share_type: str or ~data_box_management_client.models.ShareDestinationFormatType + :ivar user_name: User name for the share. + :vartype user_name: str + :ivar password: Password for the share. + :vartype password: str + :ivar supported_access_protocols: Access protocols supported on the device. + :vartype supported_access_protocols: list[str or + ~data_box_management_client.models.AccessProtocol] + """ + + _validation = { + 'share_name': {'readonly': True}, + 'share_type': {'readonly': True}, + 'user_name': {'readonly': True}, + 'password': {'readonly': True}, + 'supported_access_protocols': {'readonly': True}, + } + + _attribute_map = { + 'share_name': {'key': 'shareName', 'type': 'str'}, + 'share_type': {'key': 'shareType', 'type': 'str'}, + 'user_name': {'key': 'userName', 'type': 'str'}, + 'password': {'key': 'password', 'type': 'str'}, + 'supported_access_protocols': {'key': 'supportedAccessProtocols', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(ShareCredentialDetails, self).__init__(**kwargs) + self.share_name = None + self.share_type = None + self.user_name = None + self.password = None + self.supported_access_protocols = None + + +class ShipmentPickUpRequest(msrest.serialization.Model): + """Shipment pick up request details. + + All required parameters must be populated in order to send to Azure. + + :param start_time: Required. Minimum date after which the pick up should commence, this must be + in local time of pick up area. + :type start_time: ~datetime.datetime + :param end_time: Required. Maximum date before which the pick up should commence, this must be + in local time of pick up area. 
+ :type end_time: ~datetime.datetime + :param shipment_location: Required. Shipment Location in the pickup place. Eg.front desk. + :type shipment_location: str + """ + + _validation = { + 'start_time': {'required': True}, + 'end_time': {'required': True}, + 'shipment_location': {'required': True}, + } + + _attribute_map = { + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'shipment_location': {'key': 'shipmentLocation', 'type': 'str'}, + } + + def __init__( + self, + *, + start_time: datetime.datetime, + end_time: datetime.datetime, + shipment_location: str, + **kwargs + ): + super(ShipmentPickUpRequest, self).__init__(**kwargs) + self.start_time = start_time + self.end_time = end_time + self.shipment_location = shipment_location + + +class ShipmentPickUpResponse(msrest.serialization.Model): + """Shipment pick up response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar confirmation_number: Confirmation number for the pick up request. + :vartype confirmation_number: str + :ivar ready_by_time: Time by which shipment should be ready for pick up, this is in local time + of pick up area. + :vartype ready_by_time: ~datetime.datetime + """ + + _validation = { + 'confirmation_number': {'readonly': True}, + 'ready_by_time': {'readonly': True}, + } + + _attribute_map = { + 'confirmation_number': {'key': 'confirmationNumber', 'type': 'str'}, + 'ready_by_time': {'key': 'readyByTime', 'type': 'iso-8601'}, + } + + def __init__( + self, + **kwargs + ): + super(ShipmentPickUpResponse, self).__init__(**kwargs) + self.confirmation_number = None + self.ready_by_time = None + + +class ShippingAddress(msrest.serialization.Model): + """Shipping address where customer wishes to receive the device. + + All required parameters must be populated in order to send to Azure. + + :param street_address1: Required. Street Address line 1. + :type street_address1: str + :param street_address2: Street Address line 2. + :type street_address2: str + :param street_address3: Street Address line 3. + :type street_address3: str + :param city: Name of the City. + :type city: str + :param state_or_province: Name of the State or Province. + :type state_or_province: str + :param country: Required. Name of the Country. + :type country: str + :param postal_code: Postal code. + :type postal_code: str + :param zip_extended_code: Extended Zip Code. + :type zip_extended_code: str + :param company_name: Name of the company. + :type company_name: str + :param address_type: Type of address. Possible values include: "None", "Residential", + "Commercial". Default value: "None". 
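# A minimal sketch of building the ShipmentPickUpRequest defined above. The
# datetimes are placeholders expressed in the pick-up location's local time,
# and the import path is assumed from the vendored-SDK layout.
import datetime

from azext_databox.vendored_sdks.databox import models

pickup_request = models.ShipmentPickUpRequest(
    start_time=datetime.datetime(2021, 9, 20, 9, 0),
    end_time=datetime.datetime(2021, 9, 22, 17, 0),
    shipment_location="Front desk",
)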
+ :type address_type: str or ~data_box_management_client.models.AddressType + """ + + _validation = { + 'street_address1': {'required': True}, + 'country': {'required': True}, + } + + _attribute_map = { + 'street_address1': {'key': 'streetAddress1', 'type': 'str'}, + 'street_address2': {'key': 'streetAddress2', 'type': 'str'}, + 'street_address3': {'key': 'streetAddress3', 'type': 'str'}, + 'city': {'key': 'city', 'type': 'str'}, + 'state_or_province': {'key': 'stateOrProvince', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, + 'postal_code': {'key': 'postalCode', 'type': 'str'}, + 'zip_extended_code': {'key': 'zipExtendedCode', 'type': 'str'}, + 'company_name': {'key': 'companyName', 'type': 'str'}, + 'address_type': {'key': 'addressType', 'type': 'str'}, + } + + def __init__( + self, + *, + street_address1: str, + country: str, + street_address2: Optional[str] = None, + street_address3: Optional[str] = None, + city: Optional[str] = None, + state_or_province: Optional[str] = None, + postal_code: Optional[str] = None, + zip_extended_code: Optional[str] = None, + company_name: Optional[str] = None, + address_type: Optional[Union[str, "AddressType"]] = "None", + **kwargs + ): + super(ShippingAddress, self).__init__(**kwargs) + self.street_address1 = street_address1 + self.street_address2 = street_address2 + self.street_address3 = street_address3 + self.city = city + self.state_or_province = state_or_province + self.country = country + self.postal_code = postal_code + self.zip_extended_code = zip_extended_code + self.company_name = company_name + self.address_type = address_type + + +class Sku(msrest.serialization.Model): + """The Sku. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The sku name. Possible values include: "DataBox", "DataBoxDisk", + "DataBoxHeavy", "DataBoxCustomerDisk". + :type name: str or ~data_box_management_client.models.SkuName + :param display_name: The display name of the sku. + :type display_name: str + :param family: The sku family. + :type family: str + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'family': {'key': 'family', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Union[str, "SkuName"], + display_name: Optional[str] = None, + family: Optional[str] = None, + **kwargs + ): + super(Sku, self).__init__(**kwargs) + self.name = name + self.display_name = display_name + self.family = family + + +class SkuAvailabilityValidationRequest(ValidationInputRequest): + """Request to validate sku availability. + + All required parameters must be populated in order to send to Azure. + + :param validation_type: Required. Identifies the type of validation request.Constant filled by + server. Possible values include: "ValidateAddress", + "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + :param device_type: Required. Device type to be used for the job. Possible values include: + "DataBox", "DataBoxDisk", "DataBoxHeavy", "DataBoxCustomerDisk". + :type device_type: str or ~data_box_management_client.models.SkuName + :param transfer_type: Required. Type of the transfer. Possible values include: "ImportToAzure", + "ExportFromAzure". 
+ :type transfer_type: str or ~data_box_management_client.models.TransferType + :param country: Required. ISO country code. Country for hardware shipment. For codes check: + https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements. + :type country: str + :param location: Required. Location for data transfer. For locations check: + https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. + :type location: str + """ + + _validation = { + 'validation_type': {'required': True}, + 'device_type': {'required': True}, + 'transfer_type': {'required': True}, + 'country': {'required': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'validation_type': {'key': 'validationType', 'type': 'str'}, + 'device_type': {'key': 'deviceType', 'type': 'str'}, + 'transfer_type': {'key': 'transferType', 'type': 'str'}, + 'country': {'key': 'country', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + } + + def __init__( + self, + *, + device_type: Union[str, "SkuName"], + transfer_type: Union[str, "TransferType"], + country: str, + location: str, + **kwargs + ): + super(SkuAvailabilityValidationRequest, self).__init__(**kwargs) + self.validation_type = 'ValidateSkuAvailability' # type: str + self.device_type = device_type + self.transfer_type = transfer_type + self.country = country + self.location = location + + +class SkuAvailabilityValidationResponseProperties(ValidationInputResponse): + """Properties of sku availability validation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param validation_type: Required. Identifies the type of validation response.Constant filled by + server. Possible values include: "ValidateAddress", + "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + :ivar error: Error code and message of validation response. + :vartype error: ~data_box_management_client.models.CloudError + :ivar status: Sku availability validation status. Possible values include: "Valid", "Invalid", + "Skipped". + :vartype status: str or ~data_box_management_client.models.ValidationStatus + """ + + _validation = { + 'validation_type': {'required': True}, + 'error': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'validation_type': {'key': 'validationType', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'CloudError'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SkuAvailabilityValidationResponseProperties, self).__init__(**kwargs) + self.validation_type = 'ValidateSkuAvailability' # type: str + self.status = None + + +class SkuCapacity(msrest.serialization.Model): + """Capacity of the sku. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar usable: Usable capacity in TB. + :vartype usable: str + :ivar maximum: Maximum capacity in TB. 
+ :vartype maximum: str + """ + + _validation = { + 'usable': {'readonly': True}, + 'maximum': {'readonly': True}, + } + + _attribute_map = { + 'usable': {'key': 'usable', 'type': 'str'}, + 'maximum': {'key': 'maximum', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SkuCapacity, self).__init__(**kwargs) + self.usable = None + self.maximum = None + + +class SkuCost(msrest.serialization.Model): + """Describes metadata for retrieving price info. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar meter_id: Meter id of the Sku. + :vartype meter_id: str + :ivar meter_type: The type of the meter. + :vartype meter_type: str + :ivar multiplier: Multiplier specifies the region specific value to be multiplied with 1$ guid. + Eg: Our new regions will be using 1$ shipping guid with appropriate multiplier specific to + region. + :vartype multiplier: float + """ + + _validation = { + 'meter_id': {'readonly': True}, + 'meter_type': {'readonly': True}, + 'multiplier': {'readonly': True}, + } + + _attribute_map = { + 'meter_id': {'key': 'meterId', 'type': 'str'}, + 'meter_type': {'key': 'meterType', 'type': 'str'}, + 'multiplier': {'key': 'multiplier', 'type': 'float'}, + } + + def __init__( + self, + **kwargs + ): + super(SkuCost, self).__init__(**kwargs) + self.meter_id = None + self.meter_type = None + self.multiplier = None + + +class SkuInformation(msrest.serialization.Model): + """Information of the sku. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar sku: The Sku. + :vartype sku: ~data_box_management_client.models.Sku + :ivar enabled: The sku is enabled or not. + :vartype enabled: bool + :ivar data_location_to_service_location_map: The map of data location to service location. + :vartype data_location_to_service_location_map: + list[~data_box_management_client.models.DataLocationToServiceLocationMap] + :ivar capacity: Capacity of the Sku. + :vartype capacity: ~data_box_management_client.models.SkuCapacity + :ivar costs: Cost of the Sku. + :vartype costs: list[~data_box_management_client.models.SkuCost] + :ivar api_versions: Api versions that support this Sku. + :vartype api_versions: list[str] + :ivar disabled_reason: Reason why the Sku is disabled. Possible values include: "None", + "Country", "Region", "Feature", "OfferType", "NoSubscriptionInfo". + :vartype disabled_reason: str or ~data_box_management_client.models.SkuDisabledReason + :ivar disabled_reason_message: Message for why the Sku is disabled. + :vartype disabled_reason_message: str + :ivar required_feature: Required feature to access the sku. 
+ :vartype required_feature: str + """ + + _validation = { + 'sku': {'readonly': True}, + 'enabled': {'readonly': True}, + 'data_location_to_service_location_map': {'readonly': True}, + 'capacity': {'readonly': True}, + 'costs': {'readonly': True}, + 'api_versions': {'readonly': True}, + 'disabled_reason': {'readonly': True}, + 'disabled_reason_message': {'readonly': True}, + 'required_feature': {'readonly': True}, + } + + _attribute_map = { + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'data_location_to_service_location_map': {'key': 'properties.dataLocationToServiceLocationMap', 'type': '[DataLocationToServiceLocationMap]'}, + 'capacity': {'key': 'properties.capacity', 'type': 'SkuCapacity'}, + 'costs': {'key': 'properties.costs', 'type': '[SkuCost]'}, + 'api_versions': {'key': 'properties.apiVersions', 'type': '[str]'}, + 'disabled_reason': {'key': 'properties.disabledReason', 'type': 'str'}, + 'disabled_reason_message': {'key': 'properties.disabledReasonMessage', 'type': 'str'}, + 'required_feature': {'key': 'properties.requiredFeature', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SkuInformation, self).__init__(**kwargs) + self.sku = None + self.enabled = None + self.data_location_to_service_location_map = None + self.capacity = None + self.costs = None + self.api_versions = None + self.disabled_reason = None + self.disabled_reason_message = None + self.required_feature = None + + +class StorageAccountDetails(DataAccountDetails): + """Details for the storage account. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Account Type of the data to be transferred.Constant filled + by server. Possible values include: "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param share_password: Password for all the shares to be created on the device. Should not be + passed for TransferType:ExportFromAzure jobs. If this is not passed, the service will generate + password itself. This will not be returned in Get Call. Password Requirements : Password must + be minimum of 12 and maximum of 64 characters. Password must have at least one uppercase + alphabet, one number and one special character. Password cannot have the following characters : + IilLoO0 Password can have only alphabets, numbers and these characters : @#-$%^!+=;:_()]+. + :type share_password: str + :param storage_account_id: Required. Storage Account Resource Id. + :type storage_account_id: str + :param xt_passthrough_storage_account_id: Customer's Storage Account Resource Id of the XT- + passthrough job. + :type xt_passthrough_storage_account_id: str + :param xt_passthrough_storage_account_tenant_id: Customer's Storage Account's Tenant Id of the + XT-passthrough job. 
+ :type xt_passthrough_storage_account_tenant_id: str + """ + + _validation = { + 'data_account_type': {'required': True}, + 'storage_account_id': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'share_password': {'key': 'sharePassword', 'type': 'str'}, + 'storage_account_id': {'key': 'storageAccountId', 'type': 'str'}, + 'xt_passthrough_storage_account_id': {'key': 'xtPassthroughStorageAccountId', 'type': 'str'}, + 'xt_passthrough_storage_account_tenant_id': {'key': 'xtPassthroughStorageAccountTenantId', 'type': 'str'}, + } + + def __init__( + self, + *, + storage_account_id: str, + share_password: Optional[str] = None, + xt_passthrough_storage_account_id: Optional[str] = None, + xt_passthrough_storage_account_tenant_id: Optional[str] = None, + **kwargs + ): + super(StorageAccountDetails, self).__init__(share_password=share_password, **kwargs) + self.data_account_type = 'StorageAccount' # type: str + self.storage_account_id = storage_account_id + self.xt_passthrough_storage_account_id = xt_passthrough_storage_account_id + self.xt_passthrough_storage_account_tenant_id = xt_passthrough_storage_account_tenant_id + + +class SubscriptionIsAllowedToCreateJobValidationRequest(ValidationInputRequest): + """Request to validate subscription permission to create jobs. + + All required parameters must be populated in order to send to Azure. + + :param validation_type: Required. Identifies the type of validation request.Constant filled by + server. Possible values include: "ValidateAddress", + "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + """ + + _validation = { + 'validation_type': {'required': True}, + } + + _attribute_map = { + 'validation_type': {'key': 'validationType', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SubscriptionIsAllowedToCreateJobValidationRequest, self).__init__(**kwargs) + self.validation_type = 'ValidateSubscriptionIsAllowedToCreateJob' # type: str + + +class SubscriptionIsAllowedToCreateJobValidationResponseProperties(ValidationInputResponse): + """Properties of subscription permission to create job validation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param validation_type: Required. Identifies the type of validation response.Constant filled by + server. Possible values include: "ValidateAddress", + "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + :ivar error: Error code and message of validation response. + :vartype error: ~data_box_management_client.models.CloudError + :ivar status: Validation status of subscription permission to create job. Possible values + include: "Valid", "Invalid", "Skipped". 
+ :vartype status: str or ~data_box_management_client.models.ValidationStatus + """ + + _validation = { + 'validation_type': {'required': True}, + 'error': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'validation_type': {'key': 'validationType', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'CloudError'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SubscriptionIsAllowedToCreateJobValidationResponseProperties, self).__init__(**kwargs) + self.validation_type = 'ValidateSubscriptionIsAllowedToCreateJob' # type: str + self.status = None + + +class SystemData(msrest.serialization.Model): + """Provides details about resource creation and update time. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar created_by: A string identifier for the identity that created the resource. + :vartype created_by: str + :ivar created_by_type: The type of identity that created the resource: user, application, + managedIdentity. + :vartype created_by_type: str + :ivar created_at: The timestamp of resource creation (UTC). + :vartype created_at: ~datetime.datetime + :ivar last_modified_by: A string identifier for the identity that last modified the resource. + :vartype last_modified_by: str + :ivar last_modified_by_type: The type of identity that last modified the resource: user, + application, managedIdentity. + :vartype last_modified_by_type: str + :ivar last_modified_at: The timestamp of resource last modification (UTC). + :vartype last_modified_at: ~datetime.datetime + """ + + _validation = { + 'created_by': {'readonly': True}, + 'created_by_type': {'readonly': True}, + 'created_at': {'readonly': True}, + 'last_modified_by': {'readonly': True}, + 'last_modified_by_type': {'readonly': True}, + 'last_modified_at': {'readonly': True}, + } + + _attribute_map = { + 'created_by': {'key': 'createdBy', 'type': 'str'}, + 'created_by_type': {'key': 'createdByType', 'type': 'str'}, + 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, + 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, + 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, + 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, + } + + def __init__( + self, + **kwargs + ): + super(SystemData, self).__init__(**kwargs) + self.created_by = None + self.created_by_type = None + self.created_at = None + self.last_modified_by = None + self.last_modified_by_type = None + self.last_modified_at = None + + +class TransferAllDetails(msrest.serialization.Model): + """Details to transfer all data. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Type of the account of data. Possible values include: + "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param transfer_all_blobs: To indicate if all Azure blobs have to be transferred. + :type transfer_all_blobs: bool + :param transfer_all_files: To indicate if all Azure Files have to be transferred. 
+ :type transfer_all_files: bool + """ + + _validation = { + 'data_account_type': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'transfer_all_blobs': {'key': 'transferAllBlobs', 'type': 'bool'}, + 'transfer_all_files': {'key': 'transferAllFiles', 'type': 'bool'}, + } + + def __init__( + self, + *, + data_account_type: Union[str, "DataAccountType"], + transfer_all_blobs: Optional[bool] = None, + transfer_all_files: Optional[bool] = None, + **kwargs + ): + super(TransferAllDetails, self).__init__(**kwargs) + self.data_account_type = data_account_type + self.transfer_all_blobs = transfer_all_blobs + self.transfer_all_files = transfer_all_files + + +class TransferConfiguration(msrest.serialization.Model): + """Configuration for defining the transfer of data. + + All required parameters must be populated in order to send to Azure. + + :param transfer_configuration_type: Required. Type of the configuration for transfer. Possible + values include: "TransferAll", "TransferUsingFilter". + :type transfer_configuration_type: str or + ~data_box_management_client.models.TransferConfigurationType + :param transfer_filter_details: Map of filter type and the details to filter. This field is + required only if the TransferConfigurationType is given as TransferUsingFilter. + :type transfer_filter_details: + ~data_box_management_client.models.TransferConfigurationTransferFilterDetails + :param transfer_all_details: Map of filter type and the details to transfer all data. This + field is required only if the TransferConfigurationType is given as TransferAll. + :type transfer_all_details: + ~data_box_management_client.models.TransferConfigurationTransferAllDetails + """ + + _validation = { + 'transfer_configuration_type': {'required': True}, + } + + _attribute_map = { + 'transfer_configuration_type': {'key': 'transferConfigurationType', 'type': 'str'}, + 'transfer_filter_details': {'key': 'transferFilterDetails', 'type': 'TransferConfigurationTransferFilterDetails'}, + 'transfer_all_details': {'key': 'transferAllDetails', 'type': 'TransferConfigurationTransferAllDetails'}, + } + + def __init__( + self, + *, + transfer_configuration_type: Union[str, "TransferConfigurationType"], + transfer_filter_details: Optional["TransferConfigurationTransferFilterDetails"] = None, + transfer_all_details: Optional["TransferConfigurationTransferAllDetails"] = None, + **kwargs + ): + super(TransferConfiguration, self).__init__(**kwargs) + self.transfer_configuration_type = transfer_configuration_type + self.transfer_filter_details = transfer_filter_details + self.transfer_all_details = transfer_all_details + + +class TransferConfigurationTransferAllDetails(msrest.serialization.Model): + """Map of filter type and the details to transfer all data. This field is required only if the TransferConfigurationType is given as TransferAll. + + :param include: Details to transfer all data. + :type include: ~data_box_management_client.models.TransferAllDetails + """ + + _attribute_map = { + 'include': {'key': 'include', 'type': 'TransferAllDetails'}, + } + + def __init__( + self, + *, + include: Optional["TransferAllDetails"] = None, + **kwargs + ): + super(TransferConfigurationTransferAllDetails, self).__init__(**kwargs) + self.include = include + + +class TransferConfigurationTransferFilterDetails(msrest.serialization.Model): + """Map of filter type and the details to filter. This field is required only if the TransferConfigurationType is given as TransferUsingFilter. 
+ + :param include: Details of the filtering the transfer of data. + :type include: ~data_box_management_client.models.TransferFilterDetails + """ + + _attribute_map = { + 'include': {'key': 'include', 'type': 'TransferFilterDetails'}, + } + + def __init__( + self, + *, + include: Optional["TransferFilterDetails"] = None, + **kwargs + ): + super(TransferConfigurationTransferFilterDetails, self).__init__(**kwargs) + self.include = include + + +class TransferFilterDetails(msrest.serialization.Model): + """Details of the filtering the transfer of data. + + All required parameters must be populated in order to send to Azure. + + :param data_account_type: Required. Type of the account of data. Possible values include: + "StorageAccount", "ManagedDisk". + :type data_account_type: str or ~data_box_management_client.models.DataAccountType + :param blob_filter_details: Filter details to transfer blobs. + :type blob_filter_details: ~data_box_management_client.models.BlobFilterDetails + :param azure_file_filter_details: Filter details to transfer Azure files. + :type azure_file_filter_details: ~data_box_management_client.models.AzureFileFilterDetails + :param filter_file_details: Details of the filter files to be used for data transfer. + :type filter_file_details: list[~data_box_management_client.models.FilterFileDetails] + """ + + _validation = { + 'data_account_type': {'required': True}, + } + + _attribute_map = { + 'data_account_type': {'key': 'dataAccountType', 'type': 'str'}, + 'blob_filter_details': {'key': 'blobFilterDetails', 'type': 'BlobFilterDetails'}, + 'azure_file_filter_details': {'key': 'azureFileFilterDetails', 'type': 'AzureFileFilterDetails'}, + 'filter_file_details': {'key': 'filterFileDetails', 'type': '[FilterFileDetails]'}, + } + + def __init__( + self, + *, + data_account_type: Union[str, "DataAccountType"], + blob_filter_details: Optional["BlobFilterDetails"] = None, + azure_file_filter_details: Optional["AzureFileFilterDetails"] = None, + filter_file_details: Optional[List["FilterFileDetails"]] = None, + **kwargs + ): + super(TransferFilterDetails, self).__init__(**kwargs) + self.data_account_type = data_account_type + self.blob_filter_details = blob_filter_details + self.azure_file_filter_details = azure_file_filter_details + self.filter_file_details = filter_file_details + + +class TransportAvailabilityDetails(msrest.serialization.Model): + """Transport options availability details for given region. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar shipment_type: Transport Shipment Type supported for given region. Possible values + include: "CustomerManaged", "MicrosoftManaged". + :vartype shipment_type: str or ~data_box_management_client.models.TransportShipmentTypes + """ + + _validation = { + 'shipment_type': {'readonly': True}, + } + + _attribute_map = { + 'shipment_type': {'key': 'shipmentType', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(TransportAvailabilityDetails, self).__init__(**kwargs) + self.shipment_type = None + + +class TransportAvailabilityRequest(msrest.serialization.Model): + """Request body to get the transport availability for given sku. + + :param sku_name: Type of the device. Possible values include: "DataBox", "DataBoxDisk", + "DataBoxHeavy", "DataBoxCustomerDisk". 
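# A minimal sketch of wiring together the transfer-configuration models
# defined above: one configuration that transfers all data, and one that uses
# the filter wrapper. The import path is assumed; the optional filter fields
# (blob_filter_details, azure_file_filter_details, filter_file_details) are
# left unset here to avoid guessing at their shapes.
from azext_databox.vendored_sdks.databox import models

transfer_all = models.TransferConfiguration(
    transfer_configuration_type="TransferAll",
    transfer_all_details=models.TransferConfigurationTransferAllDetails(
        include=models.TransferAllDetails(
            data_account_type="StorageAccount",
            transfer_all_blobs=True,
            transfer_all_files=True,
        ),
    ),
)
transfer_filtered = models.TransferConfiguration(
    transfer_configuration_type="TransferUsingFilter",
    transfer_filter_details=models.TransferConfigurationTransferFilterDetails(
        include=models.TransferFilterDetails(
            data_account_type="StorageAccount",
            # one of the optional filter fields would normally be set here
            # to scope which blobs or files get transferred.
        ),
    ),
)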
+ :type sku_name: str or ~data_box_management_client.models.SkuName + """ + + _attribute_map = { + 'sku_name': {'key': 'skuName', 'type': 'str'}, + } + + def __init__( + self, + *, + sku_name: Optional[Union[str, "SkuName"]] = None, + **kwargs + ): + super(TransportAvailabilityRequest, self).__init__(**kwargs) + self.sku_name = sku_name + + +class TransportAvailabilityResponse(msrest.serialization.Model): + """Transport options available for given sku in a region. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar transport_availability_details: List of transport availability details for given region. + :vartype transport_availability_details: + list[~data_box_management_client.models.TransportAvailabilityDetails] + """ + + _validation = { + 'transport_availability_details': {'readonly': True}, + } + + _attribute_map = { + 'transport_availability_details': {'key': 'transportAvailabilityDetails', 'type': '[TransportAvailabilityDetails]'}, + } + + def __init__( + self, + **kwargs + ): + super(TransportAvailabilityResponse, self).__init__(**kwargs) + self.transport_availability_details = None + + +class TransportPreferences(msrest.serialization.Model): + """Preferences related to the shipment logistics of the sku. + + All required parameters must be populated in order to send to Azure. + + :param preferred_shipment_type: Required. Indicates Shipment Logistics type that the customer + preferred. Possible values include: "CustomerManaged", "MicrosoftManaged". + :type preferred_shipment_type: str or ~data_box_management_client.models.TransportShipmentTypes + """ + + _validation = { + 'preferred_shipment_type': {'required': True}, + } + + _attribute_map = { + 'preferred_shipment_type': {'key': 'preferredShipmentType', 'type': 'str'}, + } + + def __init__( + self, + *, + preferred_shipment_type: Union[str, "TransportShipmentTypes"], + **kwargs + ): + super(TransportPreferences, self).__init__(**kwargs) + self.preferred_shipment_type = preferred_shipment_type + + +class UnencryptedCredentials(msrest.serialization.Model): + """Unencrypted credentials for accessing device. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar job_name: Name of the job. + :vartype job_name: str + :ivar job_secrets: Secrets related to this job. + :vartype job_secrets: ~data_box_management_client.models.JobSecrets + """ + + _validation = { + 'job_name': {'readonly': True}, + 'job_secrets': {'readonly': True}, + } + + _attribute_map = { + 'job_name': {'key': 'jobName', 'type': 'str'}, + 'job_secrets': {'key': 'jobSecrets', 'type': 'JobSecrets'}, + } + + def __init__( + self, + **kwargs + ): + super(UnencryptedCredentials, self).__init__(**kwargs) + self.job_name = None + self.job_secrets = None + + +class UnencryptedCredentialsList(msrest.serialization.Model): + """List of unencrypted credentials for accessing device. + + :param value: List of unencrypted credentials. + :type value: list[~data_box_management_client.models.UnencryptedCredentials] + :param next_link: Link for the next set of unencrypted credentials. 
+ :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[UnencryptedCredentials]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["UnencryptedCredentials"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + super(UnencryptedCredentialsList, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class UpdateJobDetails(msrest.serialization.Model): + """Job details for update. + + :param contact_details: Contact details for notification and shipping. + :type contact_details: ~data_box_management_client.models.ContactDetails + :param shipping_address: Shipping address of the customer. + :type shipping_address: ~data_box_management_client.models.ShippingAddress + :param key_encryption_key: Key encryption key for the job. + :type key_encryption_key: ~data_box_management_client.models.KeyEncryptionKey + :param return_package_details: Return package details of job. This is applicable only for + customer disk sku. + :type return_package_details: ~data_box_management_client.models.PackageCarrierDetails + """ + + _attribute_map = { + 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, + 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, + 'key_encryption_key': {'key': 'keyEncryptionKey', 'type': 'KeyEncryptionKey'}, + 'return_package_details': {'key': 'returnPackageDetails', 'type': 'PackageCarrierDetails'}, + } + + def __init__( + self, + *, + contact_details: Optional["ContactDetails"] = None, + shipping_address: Optional["ShippingAddress"] = None, + key_encryption_key: Optional["KeyEncryptionKey"] = None, + return_package_details: Optional["PackageCarrierDetails"] = None, + **kwargs + ): + super(UpdateJobDetails, self).__init__(**kwargs) + self.contact_details = contact_details + self.shipping_address = shipping_address + self.key_encryption_key = key_encryption_key + self.return_package_details = return_package_details + + +class UserAssignedIdentity(msrest.serialization.Model): + """Class defining User assigned identity details. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar principal_id: The principal id of user assigned identity. + :vartype principal_id: str + :ivar client_id: The client id of user assigned identity. + :vartype client_id: str + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'client_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(UserAssignedIdentity, self).__init__(**kwargs) + self.principal_id = None + self.client_id = None + + +class UserAssignedProperties(msrest.serialization.Model): + """User assigned identity properties. + + :param resource_id: Arm resource id for user assigned identity to be used to fetch MSI token. + :type resource_id: str + """ + + _attribute_map = { + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + } + + def __init__( + self, + *, + resource_id: Optional[str] = None, + **kwargs + ): + super(UserAssignedProperties, self).__init__(**kwargs) + self.resource_id = resource_id + + +class ValidateAddress(ValidationInputRequest): + """The requirements to validate customer address where the device needs to be shipped. + + All required parameters must be populated in order to send to Azure. + + :param validation_type: Required. 
Identifies the type of validation request.Constant filled by + server. Possible values include: "ValidateAddress", + "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", + "ValidateSkuAvailability", "ValidateDataTransferDetails". + :type validation_type: str or ~data_box_management_client.models.ValidationInputDiscriminator + :param shipping_address: Required. Shipping address of the customer. + :type shipping_address: ~data_box_management_client.models.ShippingAddress + :param device_type: Required. Device type to be used for the job. Possible values include: + "DataBox", "DataBoxDisk", "DataBoxHeavy", "DataBoxCustomerDisk". + :type device_type: str or ~data_box_management_client.models.SkuName + :param transport_preferences: Preferences related to the shipment logistics of the sku. + :type transport_preferences: ~data_box_management_client.models.TransportPreferences + """ + + _validation = { + 'validation_type': {'required': True}, + 'shipping_address': {'required': True}, + 'device_type': {'required': True}, + } + + _attribute_map = { + 'validation_type': {'key': 'validationType', 'type': 'str'}, + 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, + 'device_type': {'key': 'deviceType', 'type': 'str'}, + 'transport_preferences': {'key': 'transportPreferences', 'type': 'TransportPreferences'}, + } + + def __init__( + self, + *, + shipping_address: "ShippingAddress", + device_type: Union[str, "SkuName"], + transport_preferences: Optional["TransportPreferences"] = None, + **kwargs + ): + super(ValidateAddress, self).__init__(**kwargs) + self.validation_type = 'ValidateAddress' # type: str + self.shipping_address = shipping_address + self.device_type = device_type + self.transport_preferences = transport_preferences + + +class ValidationResponse(msrest.serialization.Model): + """Response of pre job creation validations. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar status: Overall validation status. Possible values include: "AllValidToProceed", + "InputsRevisitRequired", "CertainInputValidationsSkipped". + :vartype status: str or ~data_box_management_client.models.OverallValidationStatus + :ivar individual_response_details: List of response details contain validationType and its + response as key and value respectively. 
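# A minimal sketch of an address-validation input built from the
# ValidateAddress, ShippingAddress and TransportPreferences models defined
# above; the address values are placeholders and the import path is assumed
# from the vendored-SDK layout.
from azext_databox.vendored_sdks.databox import models

validate_address_input = models.ValidateAddress(
    shipping_address=models.ShippingAddress(
        street_address1="1 Example Way",
        city="Redmond",
        state_or_province="WA",
        country="US",
        postal_code="98052",
    ),
    device_type="DataBox",
    transport_preferences=models.TransportPreferences(
        preferred_shipment_type="MicrosoftManaged",
    ),
)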
+ :vartype individual_response_details: + list[~data_box_management_client.models.ValidationInputResponse] + """ + + _validation = { + 'status': {'readonly': True}, + 'individual_response_details': {'readonly': True}, + } + + _attribute_map = { + 'status': {'key': 'properties.status', 'type': 'str'}, + 'individual_response_details': {'key': 'properties.individualResponseDetails', 'type': '[ValidationInputResponse]'}, + } + + def __init__( + self, + **kwargs + ): + super(ValidationResponse, self).__init__(**kwargs) + self.status = None + self.individual_response_details = None diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/operations/__init__.py b/src/databox/azext_databox/vendored_sdks/databox/operations/__init__.py similarity index 83% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/operations/__init__.py rename to src/databox/azext_databox/vendored_sdks/databox/operations/__init__.py index 9c8fa7a8253..bd13cc67afb 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/operations/__init__.py +++ b/src/databox/azext_databox/vendored_sdks/databox/operations/__init__.py @@ -8,10 +8,12 @@ from ._operations import Operations from ._jobs_operations import JobsOperations +from ._data_box_management_client_operations import DataBoxManagementClientOperationsMixin from ._service_operations import ServiceOperations __all__ = [ 'Operations', 'JobsOperations', + 'DataBoxManagementClientOperationsMixin', 'ServiceOperations', ] diff --git a/src/databox/azext_databox/vendored_sdks/databox/operations/_data_box_management_client_operations.py b/src/databox/azext_databox/vendored_sdks/databox/operations/_data_box_management_client_operations.py new file mode 100644 index 00000000000..6c6d863907e --- /dev/null +++ b/src/databox/azext_databox/vendored_sdks/databox/operations/_data_box_management_client_operations.py @@ -0,0 +1,91 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class DataBoxManagementClientOperationsMixin(object): + + def mitigate( + self, + job_name, # type: str + resource_group_name, # type: str + mitigate_job_request, # type: "models.MitigateJobRequest" + **kwargs # type: Any + ): + # type: (...) -> None + """Request to mitigate for a given job. + + :param job_name: The name of the job Resource within the specified resource group. job names + must be between 3 and 24 characters in length and use any alphanumeric and underscore only. 
+ :type job_name: str + :param resource_group_name: The Resource Group Name. + :type resource_group_name: str + :param mitigate_job_request: Mitigation Request. + :type mitigate_job_request: ~data_box_management_client.models.MitigateJobRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.mitigate.metadata['url'] # type: ignore + path_format_arguments = { + 'jobName': self._serialize.url("job_name", job_name, 'str', max_length=24, min_length=3, pattern=r'^[-\w\.]+$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(mitigate_job_request, 'MitigateJobRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + mitigate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBox/jobs/{jobName}/mitigate'} # type: ignore diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/operations/_jobs_operations.py b/src/databox/azext_databox/vendored_sdks/databox/operations/_jobs_operations.py similarity index 82% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/operations/_jobs_operations.py rename to src/databox/azext_databox/vendored_sdks/databox/operations/_jobs_operations.py index e984b83e43a..397b60ae5e4 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/operations/_jobs_operations.py +++ b/src/databox/azext_databox/vendored_sdks/databox/operations/_jobs_operations.py @@ -16,7 +16,7 @@ from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling -from .. import models as _models +from .. 
import models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -32,14 +32,14 @@ class JobsOperations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.databox.models + :type models: ~data_box_management_client.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ - models = _models + models = models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -52,7 +52,7 @@ def list( skip_token=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> Iterable["_models.JobResourceList"] + # type: (...) -> Iterable["models.JobResourceList"] """Lists all the jobs available under the subscription. :param skip_token: $skipToken is supported on Get list of jobs, which provides the next page in @@ -60,15 +60,15 @@ def list( :type skip_token: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either JobResourceList or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databox.models.JobResourceList] + :rtype: ~azure.core.paging.ItemPaged[~data_box_management_client.models.JobResourceList] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResourceList"] + cls = kwargs.pop('cls', None) # type: ClsType["models.JobResourceList"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" accept = "application/json" def prepare_request(next_link=None): @@ -110,8 +110,9 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -120,13 +121,79 @@ def get_next(next_link=None): ) list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataBox/jobs'} # type: ignore + def mark_devices_shipped( + self, + job_name, # type: str + resource_group_name, # type: str + mark_devices_shipped_request, # type: "models.MarkDevicesShippedRequest" + **kwargs # type: Any + ): + # type: (...) -> None + """Request to mark devices for a given job as shipped. + + :param job_name: The name of the job Resource within the specified resource group. job names + must be between 3 and 24 characters in length and use any alphanumeric and underscore only. + :type job_name: str + :param resource_group_name: The Resource Group Name. + :type resource_group_name: str + :param mark_devices_shipped_request: Mark Devices Shipped Request. 
+ :type mark_devices_shipped_request: ~data_box_management_client.models.MarkDevicesShippedRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.mark_devices_shipped.metadata['url'] # type: ignore + path_format_arguments = { + 'jobName': self._serialize.url("job_name", job_name, 'str', max_length=24, min_length=3, pattern=r'^[-\w\.]+$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(mark_devices_shipped_request, 'MarkDevicesShippedRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + mark_devices_shipped.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBox/jobs/{jobName}/markDevicesShipped'} # type: ignore + def list_by_resource_group( self, resource_group_name, # type: str skip_token=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> Iterable["_models.JobResourceList"] + # type: (...) -> Iterable["models.JobResourceList"] """Lists all the jobs available under the given resource group. :param resource_group_name: The Resource Group Name. 
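Not part of the generated diff, but as a quick usage sketch of the new mark_devices_shipped operation added above: the import path, and the MarkDevicesShippedRequest/PackageCarrierInfo field names, are assumptions based on the 2021-08-01-preview surface rather than anything defined in this hunk.

# Illustrative sketch only -- not part of the vendored diff.
# Assumes the track2 client is still re-exported from
# azext_databox.vendored_sdks.databox and that MarkDevicesShippedRequest /
# PackageCarrierInfo exist with the fields shown (assumption).
from azure.identity import DefaultAzureCredential
from azext_databox.vendored_sdks.databox import DataBoxManagementClient, models

client = DataBoxManagementClient(DefaultAzureCredential(), "<subscription-id>")

# Mark the devices of an existing job as shipped back to the datacenter.
request = models.MarkDevicesShippedRequest(
    deliver_to_dc_package_details=models.PackageCarrierInfo(
        carrier_name="DHL",           # assumed field names
        tracking_id="<tracking-id>",
    )
)
client.jobs.mark_devices_shipped(
    job_name="SdkJob3971",
    resource_group_name="SdkRg4981",
    mark_devices_shipped_request=request,
)  # returns None on HTTP 204; ApiError surfaces as HttpResponseError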
@@ -136,15 +203,15 @@ def list_by_resource_group( :type skip_token: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either JobResourceList or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databox.models.JobResourceList] + :rtype: ~azure.core.paging.ItemPaged[~data_box_management_client.models.JobResourceList] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResourceList"] + cls = kwargs.pop('cls', None) # type: ClsType["models.JobResourceList"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" accept = "application/json" def prepare_request(next_link=None): @@ -187,8 +254,9 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -204,7 +272,7 @@ def get( expand=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> "_models.JobResource" + # type: (...) -> "models.JobResource" """Gets information about the specified job. :param resource_group_name: The Resource Group Name. @@ -217,15 +285,15 @@ def get( :type expand: str :keyword callable cls: A custom type or function that will be passed the direct response :return: JobResource, or the result of cls(response) - :rtype: ~azure.mgmt.databox.models.JobResource + :rtype: ~data_box_management_client.models.JobResource :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResource"] + cls = kwargs.pop('cls', None) # type: ClsType["models.JobResource"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" accept = "application/json" # Construct URL @@ -253,7 +321,8 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('JobResource', pipeline_response) @@ -267,16 +336,16 @@ def _create_initial( self, resource_group_name, # type: str job_name, # type: str - job_resource, # type: "_models.JobResource" + job_resource, # type: "models.JobResource" **kwargs # type: Any ): - # type: (...) -> Optional["_models.JobResource"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.JobResource"]] + # type: (...) 
-> Optional["models.JobResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.JobResource"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -307,7 +376,8 @@ def _create_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: @@ -323,10 +393,10 @@ def begin_create( self, resource_group_name, # type: str job_name, # type: str - job_resource, # type: "_models.JobResource" + job_resource, # type: "models.JobResource" **kwargs # type: Any ): - # type: (...) -> LROPoller["_models.JobResource"] + # type: (...) -> LROPoller["models.JobResource"] """Creates a new job with the specified parameters. Existing job cannot be updated with this API and should instead be updated with the Update job API. @@ -336,7 +406,7 @@ def begin_create( must be between 3 and 24 characters in length and use any alphanumeric and underscore only. :type job_name: str :param job_resource: Job details from request body. - :type job_resource: ~azure.mgmt.databox.models.JobResource + :type job_resource: ~data_box_management_client.models.JobResource :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a @@ -344,11 +414,11 @@ def begin_create( :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either JobResource or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databox.models.JobResource] + :rtype: ~azure.core.polling.LROPoller[~data_box_management_client.models.JobResource] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResource"] + cls = kwargs.pop('cls', None) # type: ClsType["models.JobResource"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -405,7 +475,7 @@ def _delete_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" accept = "application/json" # Construct URL @@ -429,9 +499,10 @@ def _delete_initial( pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response - if response.status_code not in [202, 204]: + if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) @@ -508,17 +579,17 @@ def _update_initial( self, resource_group_name, # type: str job_name, # type: str - job_resource_update_parameter, # type: "_models.JobResourceUpdateParameter" + job_resource_update_parameter, # type: "models.JobResourceUpdateParameter" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> Optional["_models.JobResource"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.JobResource"]] + # type: (...) -> Optional["models.JobResource"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.JobResource"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -551,7 +622,8 @@ def _update_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: @@ -567,11 +639,11 @@ def begin_update( self, resource_group_name, # type: str job_name, # type: str - job_resource_update_parameter, # type: "_models.JobResourceUpdateParameter" + job_resource_update_parameter, # type: "models.JobResourceUpdateParameter" if_match=None, # type: Optional[str] **kwargs # type: Any ): - # type: (...) -> LROPoller["_models.JobResource"] + # type: (...) -> LROPoller["models.JobResource"] """Updates the properties of an existing job. :param resource_group_name: The Resource Group Name. @@ -580,7 +652,7 @@ def begin_update( must be between 3 and 24 characters in length and use any alphanumeric and underscore only. 
:type job_name: str :param job_resource_update_parameter: Job update parameters from request body. - :type job_resource_update_parameter: ~azure.mgmt.databox.models.JobResourceUpdateParameter + :type job_resource_update_parameter: ~data_box_management_client.models.JobResourceUpdateParameter :param if_match: Defines the If-Match condition. The patch will be performed only if the ETag of the job on the server matches this value. :type if_match: str @@ -591,11 +663,11 @@ def begin_update( :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either JobResource or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databox.models.JobResource] + :rtype: ~azure.core.polling.LROPoller[~data_box_management_client.models.JobResource] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResource"] + cls = kwargs.pop('cls', None) # type: ClsType["models.JobResource"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval @@ -645,10 +717,10 @@ def book_shipment_pick_up( self, resource_group_name, # type: str job_name, # type: str - shipment_pick_up_request, # type: "_models.ShipmentPickUpRequest" + shipment_pick_up_request, # type: "models.ShipmentPickUpRequest" **kwargs # type: Any ): - # type: (...) -> "_models.ShipmentPickUpResponse" + # type: (...) -> "models.ShipmentPickUpResponse" """Book shipment pick up. :param resource_group_name: The Resource Group Name. @@ -657,18 +729,18 @@ def book_shipment_pick_up( must be between 3 and 24 characters in length and use any alphanumeric and underscore only. :type job_name: str :param shipment_pick_up_request: Details of shipment pick up request. 
- :type shipment_pick_up_request: ~azure.mgmt.databox.models.ShipmentPickUpRequest + :type shipment_pick_up_request: ~data_box_management_client.models.ShipmentPickUpRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ShipmentPickUpResponse, or the result of cls(response) - :rtype: ~azure.mgmt.databox.models.ShipmentPickUpResponse + :rtype: ~data_box_management_client.models.ShipmentPickUpResponse :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ShipmentPickUpResponse"] + cls = kwargs.pop('cls', None) # type: ClsType["models.ShipmentPickUpResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -699,7 +771,8 @@ def book_shipment_pick_up( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ShipmentPickUpResponse', pipeline_response) @@ -713,7 +786,7 @@ def cancel( self, resource_group_name, # type: str job_name, # type: str - cancellation_reason, # type: "_models.CancellationReason" + cancellation_reason, # type: "models.CancellationReason" **kwargs # type: Any ): # type: (...) -> None @@ -725,7 +798,7 @@ def cancel( must be between 3 and 24 characters in length and use any alphanumeric and underscore only. :type job_name: str :param cancellation_reason: Reason for cancellation. - :type cancellation_reason: ~azure.mgmt.databox.models.CancellationReason + :type cancellation_reason: ~data_box_management_client.models.CancellationReason :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None @@ -736,7 +809,7 @@ def cancel( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -767,7 +840,8 @@ def cancel( if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) @@ -780,7 +854,7 @@ def list_credentials( job_name, # type: str **kwargs # type: Any ): - # type: (...) -> Iterable["_models.UnencryptedCredentialsList"] + # type: (...) -> Iterable["models.UnencryptedCredentialsList"] """This method gets the unencrypted secrets related to the job. :param resource_group_name: The Resource Group Name. 
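For reference (again not introduced by this diff), cancelling a job through the retargeted jobs operations could look roughly like the sketch below; CancellationReason carrying a single required reason string is an assumption consistent with the service models, not something shown in these hunks.

# Illustrative sketch only -- not part of the vendored diff.
from azure.identity import DefaultAzureCredential
from azext_databox.vendored_sdks.databox import DataBoxManagementClient, models

client = DataBoxManagementClient(DefaultAzureCredential(), "<subscription-id>")

client.jobs.cancel(
    resource_group_name="SdkRg4981",
    job_name="SdkJob3971",
    cancellation_reason=models.CancellationReason(
        reason="Order placed by mistake"  # assumed field name
    ),
)  # HTTP 204 on success; non-204 responses raise HttpResponseError with ApiError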
@@ -790,15 +864,15 @@ def list_credentials( :type job_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either UnencryptedCredentialsList or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databox.models.UnencryptedCredentialsList] + :rtype: ~azure.core.paging.ItemPaged[~data_box_management_client.models.UnencryptedCredentialsList] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.UnencryptedCredentialsList"] + cls = kwargs.pop('cls', None) # type: ClsType["models.UnencryptedCredentialsList"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" accept = "application/json" def prepare_request(next_link=None): @@ -840,8 +914,9 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/operations/_operations.py b/src/databox/azext_databox/vendored_sdks/databox/operations/_operations.py similarity index 88% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/operations/_operations.py rename to src/databox/azext_databox/vendored_sdks/databox/operations/_operations.py index cc8b3483362..00c0434619c 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/operations/_operations.py +++ b/src/databox/azext_databox/vendored_sdks/databox/operations/_operations.py @@ -14,7 +14,7 @@ from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.mgmt.core.exceptions import ARMErrorFormat -from .. import models as _models +from .. import models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -30,14 +30,14 @@ class Operations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.databox.models + :type models: ~data_box_management_client.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ - models = _models + models = models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -49,20 +49,20 @@ def list( self, **kwargs # type: Any ): - # type: (...) -> Iterable["_models.OperationList"] + # type: (...) -> Iterable["models.OperationList"] """This method gets all the operations. 
:keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either OperationList or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databox.models.OperationList] + :rtype: ~azure.core.paging.ItemPaged[~data_box_management_client.models.OperationList] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationList"] + cls = kwargs.pop('cls', None) # type: ClsType["models.OperationList"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" accept = "application/json" def prepare_request(next_link=None): @@ -98,8 +98,9 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/operations/_service_operations.py b/src/databox/azext_databox/vendored_sdks/databox/operations/_service_operations.py similarity index 73% rename from src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/operations/_service_operations.py rename to src/databox/azext_databox/vendored_sdks/databox/operations/_service_operations.py index f8cf7bdbaa3..760f55e1591 100644 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/operations/_service_operations.py +++ b/src/databox/azext_databox/vendored_sdks/databox/operations/_service_operations.py @@ -14,7 +14,7 @@ from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.mgmt.core.exceptions import ARMErrorFormat -from .. import models as _models +from .. import models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports @@ -30,14 +30,14 @@ class ServiceOperations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.databox.models + :type models: ~data_box_management_client.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ - models = _models + models = models def __init__(self, client, config, serializer, deserializer): self._client = client @@ -45,96 +45,14 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config - def list_available_skus( - self, - location, # type: str - available_sku_request, # type: "_models.AvailableSkuRequest" - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.AvailableSkusResult"] - """This method provides the list of available skus for the given subscription and location. - - :param location: The location of the resource. - :type location: str - :param available_sku_request: Filters for showing the available skus. 
- :type available_sku_request: ~azure.mgmt.databox.models.AvailableSkuRequest - :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either AvailableSkusResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databox.models.AvailableSkusResult] - :raises: ~azure.core.exceptions.HttpResponseError - """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.AvailableSkusResult"] - error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" - content_type = "application/json" - accept = "application/json" - - def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - if not next_link: - # Construct URL - url = self.list_available_skus.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'location': self._serialize.url("location", location, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(available_sku_request, 'AvailableSkuRequest') - body_content_kwargs['content'] = body_content - request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(available_sku_request, 'AvailableSkuRequest') - body_content_kwargs['content'] = body_content - request = self._client.get(url, query_parameters, header_parameters, **body_content_kwargs) - return request - - def extract_data(pipeline_response): - deserialized = self._deserialize('AvailableSkusResult', pipeline_response) - list_of_elem = deserialized.value - if cls: - list_of_elem = cls(list_of_elem) - return deserialized.next_link or None, iter(list_of_elem) - - def get_next(next_link=None): - request = prepare_request(next_link) - - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) - - return pipeline_response - - return ItemPaged( - get_next, extract_data - ) - list_available_skus.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataBox/locations/{location}/availableSkus'} # type: ignore - def list_available_skus_by_resource_group( self, resource_group_name, # type: str location, # type: str - available_sku_request, # type: "_models.AvailableSkuRequest" + available_sku_request, # type: "models.AvailableSkuRequest" **kwargs # type: Any ): - # type: (...) -> Iterable["_models.AvailableSkusResult"] + # type: (...) 
-> Iterable["models.AvailableSkusResult"] """This method provides the list of available skus for the given subscription, resource group and location. @@ -143,18 +61,18 @@ def list_available_skus_by_resource_group( :param location: The location of the resource. :type location: str :param available_sku_request: Filters for showing the available skus. - :type available_sku_request: ~azure.mgmt.databox.models.AvailableSkuRequest + :type available_sku_request: ~data_box_management_client.models.AvailableSkuRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either AvailableSkusResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databox.models.AvailableSkusResult] + :rtype: ~azure.core.paging.ItemPaged[~data_box_management_client.models.AvailableSkusResult] :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.AvailableSkusResult"] + cls = kwargs.pop('cls', None) # type: ClsType["models.AvailableSkusResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" content_type = "application/json" accept = "application/json" @@ -204,8 +122,9 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: + error = self._deserialize(models.ApiError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response @@ -217,28 +136,28 @@ def get_next(next_link=None): def validate_address( self, location, # type: str - validate_address, # type: "_models.ValidateAddress" + validate_address, # type: "models.ValidateAddress" **kwargs # type: Any ): - # type: (...) -> "_models.AddressValidationOutput" - """[DEPRECATED NOTICE: This operation will soon be removed] This method validates the customer + # type: (...) -> "models.AddressValidationOutput" + """[DEPRECATED NOTICE: This operation will soon be removed]. This method validates the customer shipping address and provide alternate addresses if any. :param location: The location of the resource. :type location: str :param validate_address: Shipping address of the customer. 
- :type validate_address: ~azure.mgmt.databox.models.ValidateAddress + :type validate_address: ~data_box_management_client.models.ValidateAddress :keyword callable cls: A custom type or function that will be passed the direct response :return: AddressValidationOutput, or the result of cls(response) - :rtype: ~azure.mgmt.databox.models.AddressValidationOutput + :rtype: ~data_box_management_client.models.AddressValidationOutput :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.AddressValidationOutput"] + cls = kwargs.pop('cls', None) # type: ClsType["models.AddressValidationOutput"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -268,7 +187,8 @@ def validate_address( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('AddressValidationOutput', pipeline_response) @@ -282,10 +202,10 @@ def validate_inputs_by_resource_group( self, resource_group_name, # type: str location, # type: str - validation_request, # type: "_models.ValidationRequest" + validation_request, # type: "models.ValidationRequest" **kwargs # type: Any ): - # type: (...) -> "_models.ValidationResponse" + # type: (...) -> "models.ValidationResponse" """This method does all necessary pre-job creation validation under resource group. :param resource_group_name: The Resource Group Name. @@ -293,18 +213,18 @@ def validate_inputs_by_resource_group( :param location: The location of the resource. :type location: str :param validation_request: Inputs of the customer. 
- :type validation_request: ~azure.mgmt.databox.models.ValidationRequest + :type validation_request: ~data_box_management_client.models.ValidationRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ValidationResponse, or the result of cls(response) - :rtype: ~azure.mgmt.databox.models.ValidationResponse + :rtype: ~data_box_management_client.models.ValidationResponse :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ValidationResponse"] + cls = kwargs.pop('cls', None) # type: ClsType["models.ValidationResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -335,7 +255,8 @@ def validate_inputs_by_resource_group( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ValidationResponse', pipeline_response) @@ -348,27 +269,27 @@ def validate_inputs_by_resource_group( def validate_inputs( self, location, # type: str - validation_request, # type: "_models.ValidationRequest" + validation_request, # type: "models.ValidationRequest" **kwargs # type: Any ): - # type: (...) -> "_models.ValidationResponse" + # type: (...) -> "models.ValidationResponse" """This method does all necessary pre-job creation validation under subscription. :param location: The location of the resource. :type location: str :param validation_request: Inputs of the customer. 
- :type validation_request: ~azure.mgmt.databox.models.ValidationRequest + :type validation_request: ~data_box_management_client.models.ValidationRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ValidationResponse, or the result of cls(response) - :rtype: ~azure.mgmt.databox.models.ValidationResponse + :rtype: ~data_box_management_client.models.ValidationResponse :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ValidationResponse"] + cls = kwargs.pop('cls', None) # type: ClsType["models.ValidationResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -398,7 +319,8 @@ def validate_inputs( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ValidationResponse', pipeline_response) @@ -411,27 +333,28 @@ def validate_inputs( def region_configuration( self, location, # type: str - region_configuration_request, # type: "_models.RegionConfigurationRequest" + region_configuration_request, # type: "models.RegionConfigurationRequest" **kwargs # type: Any ): - # type: (...) -> "_models.RegionConfigurationResponse" - """This API provides configuration details specific to given region/location. + # type: (...) -> "models.RegionConfigurationResponse" + """This API provides configuration details specific to given region/location at Subscription + level. :param location: The location of the resource. :type location: str :param region_configuration_request: Request body to get the configuration for the region. 
- :type region_configuration_request: ~azure.mgmt.databox.models.RegionConfigurationRequest + :type region_configuration_request: ~data_box_management_client.models.RegionConfigurationRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: RegionConfigurationResponse, or the result of cls(response) - :rtype: ~azure.mgmt.databox.models.RegionConfigurationResponse + :rtype: ~data_box_management_client.models.RegionConfigurationResponse :raises: ~azure.core.exceptions.HttpResponseError """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.RegionConfigurationResponse"] + cls = kwargs.pop('cls', None) # type: ClsType["models.RegionConfigurationResponse"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-09-01" + api_version = "2021-08-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -461,7 +384,8 @@ def region_configuration( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('RegionConfigurationResponse', pipeline_response) @@ -470,3 +394,73 @@ def region_configuration( return deserialized region_configuration.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataBox/locations/{location}/regionConfiguration'} # type: ignore + + def region_configuration_by_resource_group( + self, + resource_group_name, # type: str + location, # type: str + region_configuration_request, # type: "models.RegionConfigurationRequest" + **kwargs # type: Any + ): + # type: (...) -> "models.RegionConfigurationResponse" + """This API provides configuration details specific to given region/location at Resource group + level. + + :param resource_group_name: The Resource Group Name. + :type resource_group_name: str + :param location: The location of the resource. + :type location: str + :param region_configuration_request: Request body to get the configuration for the region at + resource group level. 
+ :type region_configuration_request: ~data_box_management_client.models.RegionConfigurationRequest + :keyword callable cls: A custom type or function that will be passed the direct response + :return: RegionConfigurationResponse, or the result of cls(response) + :rtype: ~data_box_management_client.models.RegionConfigurationResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.RegionConfigurationResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-08-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.region_configuration_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'location': self._serialize.url("location", location, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(region_configuration_request, 'RegionConfigurationRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ApiError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('RegionConfigurationResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + region_configuration_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBox/locations/{location}/regionConfiguration'} # type: ignore diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/_configuration.py b/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/_configuration.py deleted file mode 100644 index ff931804797..00000000000 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/_configuration.py +++ /dev/null @@ -1,71 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
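As a usage sketch for the new resource-group-scoped region configuration operation added above (not part of this change): constructing RegionConfigurationRequest with no arguments, and reading schedule/transport availability off the response, are assumptions about the model surface rather than anything shown in this hunk.

# Illustrative sketch only -- not part of the vendored diff.
from azure.identity import DefaultAzureCredential
from azext_databox.vendored_sdks.databox import DataBoxManagementClient, models

client = DataBoxManagementClient(DefaultAzureCredential(), "<subscription-id>")

config = client.service.region_configuration_by_resource_group(
    resource_group_name="SdkRg4981",
    location="westus",
    region_configuration_request=models.RegionConfigurationRequest(),  # all fields optional (assumption)
)
# RegionConfigurationResponse is expected to carry schedule/transport
# availability details for the region (assumption based on the API surface).
print(config)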
-# -------------------------------------------------------------------------- - -from typing import TYPE_CHECKING - -from azure.core.configuration import Configuration -from azure.core.pipeline import policies -from azure.mgmt.core.policies import ARMHttpLoggingPolicy - -from ._version import VERSION - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any - - from azure.core.credentials import TokenCredential - - -class DataBoxManagementClientConfiguration(Configuration): - """Configuration for DataBoxManagementClient. - - Note that all parameters used to create this instance are saved as instance - attributes. - - :param credential: Credential needed for the client to connect to Azure. - :type credential: ~azure.core.credentials.TokenCredential - :param subscription_id: The Subscription Id. - :type subscription_id: str - """ - - def __init__( - self, - credential, # type: "TokenCredential" - subscription_id, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - if credential is None: - raise ValueError("Parameter 'credential' must not be None.") - if subscription_id is None: - raise ValueError("Parameter 'subscription_id' must not be None.") - super(DataBoxManagementClientConfiguration, self).__init__(**kwargs) - - self.credential = credential - self.subscription_id = subscription_id - self.api_version = "2019-09-01" - self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) - kwargs.setdefault('sdk_moniker', 'mgmt-databox/{}'.format(VERSION)) - self._configure(**kwargs) - - def _configure( - self, - **kwargs # type: Any - ): - # type: (...) -> None - self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) - self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) - self.authentication_policy = kwargs.get('authentication_policy') - if self.credential and not self.authentication_policy: - self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/_data_box_management_client.py b/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/_data_box_management_client.py deleted file mode 100644 index e696cb06a78..00000000000 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/_data_box_management_client.py +++ /dev/null @@ -1,80 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- - -from typing import TYPE_CHECKING - -from azure.mgmt.core import ARMPipelineClient -from msrest import Deserializer, Serializer - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Optional - - from azure.core.credentials import TokenCredential - -from ._configuration import DataBoxManagementClientConfiguration -from .operations import Operations -from .operations import JobsOperations -from .operations import ServiceOperations -from . import models - - -class DataBoxManagementClient(object): - """The DataBox Client. - - :ivar operations: Operations operations - :vartype operations: azure.mgmt.databox.operations.Operations - :ivar jobs: JobsOperations operations - :vartype jobs: azure.mgmt.databox.operations.JobsOperations - :ivar service: ServiceOperations operations - :vartype service: azure.mgmt.databox.operations.ServiceOperations - :param credential: Credential needed for the client to connect to Azure. - :type credential: ~azure.core.credentials.TokenCredential - :param subscription_id: The Subscription Id. - :type subscription_id: str - :param str base_url: Service URL - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - """ - - def __init__( - self, - credential, # type: "TokenCredential" - subscription_id, # type: str - base_url=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> None - if not base_url: - base_url = 'https://management.azure.com' - self._config = DataBoxManagementClientConfiguration(credential, subscription_id, **kwargs) - self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) - - client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} - self._serialize = Serializer(client_models) - self._serialize.client_side_validation = False - self._deserialize = Deserializer(client_models) - - self.operations = Operations( - self._client, self._config, self._serialize, self._deserialize) - self.jobs = JobsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.service = ServiceOperations( - self._client, self._config, self._serialize, self._deserialize) - - def close(self): - # type: () -> None - self._client.close() - - def __enter__(self): - # type: () -> DataBoxManagementClient - self._client.__enter__() - return self - - def __exit__(self, *exc_details): - # type: (Any) -> None - self._client.__exit__(*exc_details) diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/_metadata.json b/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/_metadata.json deleted file mode 100644 index 10c0c55446a..00000000000 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/_metadata.json +++ /dev/null @@ -1,63 +0,0 @@ -{ - "chosen_version": "2019-09-01", - "total_api_version_list": ["2019-09-01"], - "client": { - "name": "DataBoxManagementClient", - "filename": "_data_box_management_client", - "description": "The DataBox Client.", - "base_url": "\u0027https://management.azure.com\u0027", - "custom_base_url": null, - "azure_arm": true, - "has_lro_operations": true, - "client_side_validation": false - }, - "global_parameters": { - "sync": { - "credential": { - "signature": "credential, # type: \"TokenCredential\"", - "description": "Credential needed for the client to connect to Azure.", - "docstring_type": 
"~azure.core.credentials.TokenCredential", - "required": true - }, - "subscription_id": { - "signature": "subscription_id, # type: str", - "description": "The Subscription Id.", - "docstring_type": "str", - "required": true - } - }, - "async": { - "credential": { - "signature": "credential, # type: \"AsyncTokenCredential\"", - "description": "Credential needed for the client to connect to Azure.", - "docstring_type": "~azure.core.credentials_async.AsyncTokenCredential", - "required": true - }, - "subscription_id": { - "signature": "subscription_id, # type: str", - "description": "The Subscription Id.", - "docstring_type": "str", - "required": true - } - }, - "constant": { - }, - "call": "credential, subscription_id" - }, - "config": { - "credential": true, - "credential_scopes": ["https://management.azure.com/.default"], - "credential_default_policy_type": "BearerTokenCredentialPolicy", - "credential_default_policy_type_has_async_version": true, - "credential_key_header_name": null - }, - "operation_groups": { - "operations": "Operations", - "jobs": "JobsOperations", - "service": "ServiceOperations" - }, - "operation_mixins": { - }, - "sync_imports": "None", - "async_imports": "None" -} \ No newline at end of file diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/_configuration.py b/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/_configuration.py deleted file mode 100644 index bd265586067..00000000000 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/_configuration.py +++ /dev/null @@ -1,67 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from typing import Any, TYPE_CHECKING - -from azure.core.configuration import Configuration -from azure.core.pipeline import policies -from azure.mgmt.core.policies import ARMHttpLoggingPolicy - -from .._version import VERSION - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from azure.core.credentials_async import AsyncTokenCredential - - -class DataBoxManagementClientConfiguration(Configuration): - """Configuration for DataBoxManagementClient. - - Note that all parameters used to create this instance are saved as instance - attributes. - - :param credential: Credential needed for the client to connect to Azure. - :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :param subscription_id: The Subscription Id. 
- :type subscription_id: str - """ - - def __init__( - self, - credential: "AsyncTokenCredential", - subscription_id: str, - **kwargs: Any - ) -> None: - if credential is None: - raise ValueError("Parameter 'credential' must not be None.") - if subscription_id is None: - raise ValueError("Parameter 'subscription_id' must not be None.") - super(DataBoxManagementClientConfiguration, self).__init__(**kwargs) - - self.credential = credential - self.subscription_id = subscription_id - self.api_version = "2019-09-01" - self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) - kwargs.setdefault('sdk_moniker', 'mgmt-databox/{}'.format(VERSION)) - self._configure(**kwargs) - - def _configure( - self, - **kwargs: Any - ) -> None: - self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) - self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) - self.authentication_policy = kwargs.get('authentication_policy') - if self.credential and not self.authentication_policy: - self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/_data_box_management_client.py b/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/_data_box_management_client.py deleted file mode 100644 index b192ff7cd77..00000000000 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/aio/_data_box_management_client.py +++ /dev/null @@ -1,74 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from typing import Any, Optional, TYPE_CHECKING - -from azure.mgmt.core import AsyncARMPipelineClient -from msrest import Deserializer, Serializer - -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from azure.core.credentials_async import AsyncTokenCredential - -from ._configuration import DataBoxManagementClientConfiguration -from .operations import Operations -from .operations import JobsOperations -from .operations import ServiceOperations -from .. import models - - -class DataBoxManagementClient(object): - """The DataBox Client. 
- - :ivar operations: Operations operations - :vartype operations: azure.mgmt.databox.aio.operations.Operations - :ivar jobs: JobsOperations operations - :vartype jobs: azure.mgmt.databox.aio.operations.JobsOperations - :ivar service: ServiceOperations operations - :vartype service: azure.mgmt.databox.aio.operations.ServiceOperations - :param credential: Credential needed for the client to connect to Azure. - :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :param subscription_id: The Subscription Id. - :type subscription_id: str - :param str base_url: Service URL - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - """ - - def __init__( - self, - credential: "AsyncTokenCredential", - subscription_id: str, - base_url: Optional[str] = None, - **kwargs: Any - ) -> None: - if not base_url: - base_url = 'https://management.azure.com' - self._config = DataBoxManagementClientConfiguration(credential, subscription_id, **kwargs) - self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) - - client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} - self._serialize = Serializer(client_models) - self._serialize.client_side_validation = False - self._deserialize = Deserializer(client_models) - - self.operations = Operations( - self._client, self._config, self._serialize, self._deserialize) - self.jobs = JobsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.service = ServiceOperations( - self._client, self._config, self._serialize, self._deserialize) - - async def close(self) -> None: - await self._client.close() - - async def __aenter__(self) -> "DataBoxManagementClient": - await self._client.__aenter__() - return self - - async def __aexit__(self, *exc_details) -> None: - await self._client.__aexit__(*exc_details) diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/models/_models_py3.py b/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/models/_models_py3.py deleted file mode 100644 index 89c41698459..00000000000 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/models/_models_py3.py +++ /dev/null @@ -1,3520 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -import datetime -from typing import Dict, List, Optional, Union - -import msrest.serialization - -from ._data_box_management_client_enums import * - - -class AccountCredentialDetails(msrest.serialization.Model): - """Credential details of the account. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar account_name: Name of the account. - :vartype account_name: str - :ivar data_destination_type: Data Destination Type. Possible values include: "StorageAccount", - "ManagedDisk". - :vartype data_destination_type: str or ~azure.mgmt.databox.models.DataDestinationType - :ivar account_connection_string: Connection string of the account endpoint to use the account - as a storage endpoint on the device. 
- :vartype account_connection_string: str - :ivar share_credential_details: Per share level unencrypted access credentials. - :vartype share_credential_details: list[~azure.mgmt.databox.models.ShareCredentialDetails] - """ - - _validation = { - 'account_name': {'readonly': True}, - 'data_destination_type': {'readonly': True}, - 'account_connection_string': {'readonly': True}, - 'share_credential_details': {'readonly': True}, - } - - _attribute_map = { - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, - 'account_connection_string': {'key': 'accountConnectionString', 'type': 'str'}, - 'share_credential_details': {'key': 'shareCredentialDetails', 'type': '[ShareCredentialDetails]'}, - } - - def __init__( - self, - **kwargs - ): - super(AccountCredentialDetails, self).__init__(**kwargs) - self.account_name = None - self.data_destination_type = None - self.account_connection_string = None - self.share_credential_details = None - - -class AddressValidationOutput(msrest.serialization.Model): - """Output of the address validation api. - - Variables are only populated by the server, and will be ignored when sending a request. - - :param validation_type: Identifies the type of validation response.Constant filled by server. - Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator - :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error - :ivar validation_status: The address validation status. Possible values include: "Valid", - "Invalid", "Ambiguous". - :vartype validation_status: str or ~azure.mgmt.databox.models.AddressValidationStatus - :ivar alternate_addresses: List of alternate addresses. - :vartype alternate_addresses: list[~azure.mgmt.databox.models.ShippingAddress] - """ - - _validation = { - 'error': {'readonly': True}, - 'validation_status': {'readonly': True}, - 'alternate_addresses': {'readonly': True}, - } - - _attribute_map = { - 'validation_type': {'key': 'properties.validationType', 'type': 'str'}, - 'error': {'key': 'properties.error', 'type': 'Error'}, - 'validation_status': {'key': 'properties.validationStatus', 'type': 'str'}, - 'alternate_addresses': {'key': 'properties.alternateAddresses', 'type': '[ShippingAddress]'}, - } - - def __init__( - self, - **kwargs - ): - super(AddressValidationOutput, self).__init__(**kwargs) - self.validation_type = None # type: Optional[str] - self.error = None - self.validation_status = None - self.alternate_addresses = None - - -class ValidationInputResponse(msrest.serialization.Model): - """Minimum properties that should be present in each individual validation response. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AddressValidationProperties, CreateOrderLimitForSubscriptionValidationResponseProperties, DataDestinationDetailsValidationResponseProperties, PreferencesValidationResponseProperties, SkuAvailabilityValidationResponseProperties, SubscriptionIsAllowedToCreateJobValidationResponseProperties. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param validation_type: Required. 
Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator - :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error - """ - - _validation = { - 'validation_type': {'required': True}, - 'error': {'readonly': True}, - } - - _attribute_map = { - 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, - } - - _subtype_map = { - 'validation_type': {'ValidateAddress': 'AddressValidationProperties', 'ValidateCreateOrderLimit': 'CreateOrderLimitForSubscriptionValidationResponseProperties', 'ValidateDataDestinationDetails': 'DataDestinationDetailsValidationResponseProperties', 'ValidatePreferences': 'PreferencesValidationResponseProperties', 'ValidateSkuAvailability': 'SkuAvailabilityValidationResponseProperties', 'ValidateSubscriptionIsAllowedToCreateJob': 'SubscriptionIsAllowedToCreateJobValidationResponseProperties'} - } - - def __init__( - self, - **kwargs - ): - super(ValidationInputResponse, self).__init__(**kwargs) - self.validation_type = None # type: Optional[str] - self.error = None - - -class AddressValidationProperties(ValidationInputResponse): - """The address validation output. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator - :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error - :ivar validation_status: The address validation status. Possible values include: "Valid", - "Invalid", "Ambiguous". - :vartype validation_status: str or ~azure.mgmt.databox.models.AddressValidationStatus - :ivar alternate_addresses: List of alternate addresses. - :vartype alternate_addresses: list[~azure.mgmt.databox.models.ShippingAddress] - """ - - _validation = { - 'validation_type': {'required': True}, - 'error': {'readonly': True}, - 'validation_status': {'readonly': True}, - 'alternate_addresses': {'readonly': True}, - } - - _attribute_map = { - 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, - 'validation_status': {'key': 'validationStatus', 'type': 'str'}, - 'alternate_addresses': {'key': 'alternateAddresses', 'type': '[ShippingAddress]'}, - } - - def __init__( - self, - **kwargs - ): - super(AddressValidationProperties, self).__init__(**kwargs) - self.validation_type = 'ValidateAddress' # type: str - self.validation_status = None - self.alternate_addresses = None - - -class ApplianceNetworkConfiguration(msrest.serialization.Model): - """The Network Adapter configuration of a DataBox. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar name: Name of the network. - :vartype name: str - :ivar mac_address: Mac Address. 
- :vartype mac_address: str - """ - - _validation = { - 'name': {'readonly': True}, - 'mac_address': {'readonly': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'mac_address': {'key': 'macAddress', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ApplianceNetworkConfiguration, self).__init__(**kwargs) - self.name = None - self.mac_address = None - - -class ArmBaseObject(msrest.serialization.Model): - """Base class for all objects under resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar name: Name of the object. - :vartype name: str - :ivar id: Id of the object. - :vartype id: str - :ivar type: Type of the object. - :vartype type: str - """ - - _validation = { - 'name': {'readonly': True}, - 'id': {'readonly': True}, - 'type': {'readonly': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'id': {'key': 'id', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ArmBaseObject, self).__init__(**kwargs) - self.name = None - self.id = None - self.type = None - - -class AvailableSkuRequest(msrest.serialization.Model): - """The filters for showing the available skus. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar transfer_type: Required. Type of the transfer. Default value: "ImportToAzure". - :vartype transfer_type: str - :param country: Required. ISO country code. Country for hardware shipment. For codes check: - https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements. - :type country: str - :param location: Required. Location for data transfer. For locations check: - https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. - :type location: str - :param sku_names: Sku Names to filter for available skus. - :type sku_names: list[str or ~azure.mgmt.databox.models.SkuName] - """ - - _validation = { - 'transfer_type': {'required': True, 'constant': True}, - 'country': {'required': True}, - 'location': {'required': True}, - } - - _attribute_map = { - 'transfer_type': {'key': 'transferType', 'type': 'str'}, - 'country': {'key': 'country', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'sku_names': {'key': 'skuNames', 'type': '[str]'}, - } - - transfer_type = "ImportToAzure" - - def __init__( - self, - *, - country: str, - location: str, - sku_names: Optional[List[Union[str, "SkuName"]]] = None, - **kwargs - ): - super(AvailableSkuRequest, self).__init__(**kwargs) - self.country = country - self.location = location - self.sku_names = sku_names - - -class AvailableSkusResult(msrest.serialization.Model): - """The available skus operation response. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: List of available skus. - :vartype value: list[~azure.mgmt.databox.models.SkuInformation] - :param next_link: Link for the next set of skus. 
- :type next_link: str - """ - - _validation = { - 'value': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[SkuInformation]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - *, - next_link: Optional[str] = None, - **kwargs - ): - super(AvailableSkusResult, self).__init__(**kwargs) - self.value = None - self.next_link = next_link - - -class CancellationReason(msrest.serialization.Model): - """Reason for cancellation. - - All required parameters must be populated in order to send to Azure. - - :param reason: Required. Reason for cancellation. - :type reason: str - """ - - _validation = { - 'reason': {'required': True}, - } - - _attribute_map = { - 'reason': {'key': 'reason', 'type': 'str'}, - } - - def __init__( - self, - *, - reason: str, - **kwargs - ): - super(CancellationReason, self).__init__(**kwargs) - self.reason = reason - - -class CloudError(msrest.serialization.Model): - """The error information object. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar code: Error code string. - :vartype code: str - :ivar message: Descriptive error information. - :vartype message: str - :param target: Error target. - :type target: str - :param details: More detailed error information. - :type details: list[~azure.mgmt.databox.models.CloudError] - """ - - _validation = { - 'code': {'readonly': True}, - 'message': {'readonly': True}, - } - - _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - 'target': {'key': 'target', 'type': 'str'}, - 'details': {'key': 'details', 'type': '[CloudError]'}, - } - - def __init__( - self, - *, - target: Optional[str] = None, - details: Optional[List["CloudError"]] = None, - **kwargs - ): - super(CloudError, self).__init__(**kwargs) - self.code = None - self.message = None - self.target = target - self.details = details - - -class ContactDetails(msrest.serialization.Model): - """Contact Details. - - All required parameters must be populated in order to send to Azure. - - :param contact_name: Required. Contact name of the person. - :type contact_name: str - :param phone: Required. Phone number of the contact person. - :type phone: str - :param phone_extension: Phone extension number of the contact person. - :type phone_extension: str - :param mobile: Mobile number of the contact person. - :type mobile: str - :param email_list: Required. List of Email-ids to be notified about job progress. - :type email_list: list[str] - :param notification_preference: Notification preference for a job stage. 
- :type notification_preference: list[~azure.mgmt.databox.models.NotificationPreference] - """ - - _validation = { - 'contact_name': {'required': True}, - 'phone': {'required': True}, - 'email_list': {'required': True}, - } - - _attribute_map = { - 'contact_name': {'key': 'contactName', 'type': 'str'}, - 'phone': {'key': 'phone', 'type': 'str'}, - 'phone_extension': {'key': 'phoneExtension', 'type': 'str'}, - 'mobile': {'key': 'mobile', 'type': 'str'}, - 'email_list': {'key': 'emailList', 'type': '[str]'}, - 'notification_preference': {'key': 'notificationPreference', 'type': '[NotificationPreference]'}, - } - - def __init__( - self, - *, - contact_name: str, - phone: str, - email_list: List[str], - phone_extension: Optional[str] = None, - mobile: Optional[str] = None, - notification_preference: Optional[List["NotificationPreference"]] = None, - **kwargs - ): - super(ContactDetails, self).__init__(**kwargs) - self.contact_name = contact_name - self.phone = phone - self.phone_extension = phone_extension - self.mobile = mobile - self.email_list = email_list - self.notification_preference = notification_preference - - -class CopyLogDetails(msrest.serialization.Model): - """Details for log generated during copy. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DataBoxAccountCopyLogDetails, DataBoxDiskCopyLogDetails, DataBoxHeavyAccountCopyLogDetails. - - All required parameters must be populated in order to send to Azure. - - :param copy_log_details_type: Required. Indicates the type of job details.Constant filled by - server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type copy_log_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - """ - - _validation = { - 'copy_log_details_type': {'required': True}, - } - - _attribute_map = { - 'copy_log_details_type': {'key': 'copyLogDetailsType', 'type': 'str'}, - } - - _subtype_map = { - 'copy_log_details_type': {'DataBox': 'DataBoxAccountCopyLogDetails', 'DataBoxDisk': 'DataBoxDiskCopyLogDetails', 'DataBoxHeavy': 'DataBoxHeavyAccountCopyLogDetails'} - } - - def __init__( - self, - **kwargs - ): - super(CopyLogDetails, self).__init__(**kwargs) - self.copy_log_details_type = None # type: Optional[str] - - -class CopyProgress(msrest.serialization.Model): - """Copy progress. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar storage_account_name: Name of the storage account where the data needs to be uploaded. - :vartype storage_account_name: str - :ivar data_destination_type: Data Destination Type. Possible values include: "StorageAccount", - "ManagedDisk". - :vartype data_destination_type: str or ~azure.mgmt.databox.models.DataDestinationType - :ivar account_id: Id of the account where the data needs to be uploaded. - :vartype account_id: str - :ivar bytes_sent_to_cloud: Amount of data uploaded by the job as of now. - :vartype bytes_sent_to_cloud: long - :ivar total_bytes_to_process: Total amount of data to be processed by the job. - :vartype total_bytes_to_process: long - :ivar files_processed: Number of files processed by the job as of now. - :vartype files_processed: long - :ivar total_files_to_process: Total number of files to be processed by the job. - :vartype total_files_to_process: long - :ivar invalid_files_processed: Number of files not adhering to azure naming conventions which - were processed by automatic renaming. 
- :vartype invalid_files_processed: long - :ivar invalid_file_bytes_uploaded: Total amount of data not adhering to azure naming - conventions which were processed by automatic renaming. - :vartype invalid_file_bytes_uploaded: long - :ivar renamed_container_count: Number of folders not adhering to azure naming conventions which - were processed by automatic renaming. - :vartype renamed_container_count: long - :ivar files_errored_out: Number of files which could not be copied. - :vartype files_errored_out: long - """ - - _validation = { - 'storage_account_name': {'readonly': True}, - 'data_destination_type': {'readonly': True}, - 'account_id': {'readonly': True}, - 'bytes_sent_to_cloud': {'readonly': True}, - 'total_bytes_to_process': {'readonly': True}, - 'files_processed': {'readonly': True}, - 'total_files_to_process': {'readonly': True}, - 'invalid_files_processed': {'readonly': True}, - 'invalid_file_bytes_uploaded': {'readonly': True}, - 'renamed_container_count': {'readonly': True}, - 'files_errored_out': {'readonly': True}, - } - - _attribute_map = { - 'storage_account_name': {'key': 'storageAccountName', 'type': 'str'}, - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, - 'account_id': {'key': 'accountId', 'type': 'str'}, - 'bytes_sent_to_cloud': {'key': 'bytesSentToCloud', 'type': 'long'}, - 'total_bytes_to_process': {'key': 'totalBytesToProcess', 'type': 'long'}, - 'files_processed': {'key': 'filesProcessed', 'type': 'long'}, - 'total_files_to_process': {'key': 'totalFilesToProcess', 'type': 'long'}, - 'invalid_files_processed': {'key': 'invalidFilesProcessed', 'type': 'long'}, - 'invalid_file_bytes_uploaded': {'key': 'invalidFileBytesUploaded', 'type': 'long'}, - 'renamed_container_count': {'key': 'renamedContainerCount', 'type': 'long'}, - 'files_errored_out': {'key': 'filesErroredOut', 'type': 'long'}, - } - - def __init__( - self, - **kwargs - ): - super(CopyProgress, self).__init__(**kwargs) - self.storage_account_name = None - self.data_destination_type = None - self.account_id = None - self.bytes_sent_to_cloud = None - self.total_bytes_to_process = None - self.files_processed = None - self.total_files_to_process = None - self.invalid_files_processed = None - self.invalid_file_bytes_uploaded = None - self.renamed_container_count = None - self.files_errored_out = None - - -class ValidationRequest(msrest.serialization.Model): - """Input request for all pre job creation validation. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CreateJobValidations. - - All required parameters must be populated in order to send to Azure. - - :param individual_request_details: Required. List of request details contain validationType and - its request as key and value respectively. - :type individual_request_details: list[~azure.mgmt.databox.models.ValidationInputRequest] - :param validation_category: Required. Identify the nature of validation.Constant filled by - server. 
- :type validation_category: str - """ - - _validation = { - 'individual_request_details': {'required': True}, - 'validation_category': {'required': True}, - } - - _attribute_map = { - 'individual_request_details': {'key': 'individualRequestDetails', 'type': '[ValidationInputRequest]'}, - 'validation_category': {'key': 'validationCategory', 'type': 'str'}, - } - - _subtype_map = { - 'validation_category': {'JobCreationValidation': 'CreateJobValidations'} - } - - def __init__( - self, - *, - individual_request_details: List["ValidationInputRequest"], - **kwargs - ): - super(ValidationRequest, self).__init__(**kwargs) - self.individual_request_details = individual_request_details - self.validation_category = None # type: Optional[str] - - -class CreateJobValidations(ValidationRequest): - """It does all pre-job creation validations. - - All required parameters must be populated in order to send to Azure. - - :param individual_request_details: Required. List of request details contain validationType and - its request as key and value respectively. - :type individual_request_details: list[~azure.mgmt.databox.models.ValidationInputRequest] - :param validation_category: Required. Identify the nature of validation.Constant filled by - server. - :type validation_category: str - """ - - _validation = { - 'individual_request_details': {'required': True}, - 'validation_category': {'required': True}, - } - - _attribute_map = { - 'individual_request_details': {'key': 'individualRequestDetails', 'type': '[ValidationInputRequest]'}, - 'validation_category': {'key': 'validationCategory', 'type': 'str'}, - } - - def __init__( - self, - *, - individual_request_details: List["ValidationInputRequest"], - **kwargs - ): - super(CreateJobValidations, self).__init__(individual_request_details=individual_request_details, **kwargs) - self.validation_category = 'JobCreationValidation' # type: str - - -class ValidationInputRequest(msrest.serialization.Model): - """Minimum fields that must be present in any type of validation request. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ValidateAddress, CreateOrderLimitForSubscriptionValidationRequest, DataDestinationDetailsValidationRequest, PreferencesValidationRequest, SkuAvailabilityValidationRequest, SubscriptionIsAllowedToCreateJobValidationRequest. - - All required parameters must be populated in order to send to Azure. - - :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". 
- :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator - """ - - _validation = { - 'validation_type': {'required': True}, - } - - _attribute_map = { - 'validation_type': {'key': 'validationType', 'type': 'str'}, - } - - _subtype_map = { - 'validation_type': {'ValidateAddress': 'ValidateAddress', 'ValidateCreateOrderLimit': 'CreateOrderLimitForSubscriptionValidationRequest', 'ValidateDataDestinationDetails': 'DataDestinationDetailsValidationRequest', 'ValidatePreferences': 'PreferencesValidationRequest', 'ValidateSkuAvailability': 'SkuAvailabilityValidationRequest', 'ValidateSubscriptionIsAllowedToCreateJob': 'SubscriptionIsAllowedToCreateJobValidationRequest'} - } - - def __init__( - self, - **kwargs - ): - super(ValidationInputRequest, self).__init__(**kwargs) - self.validation_type = None # type: Optional[str] - - -class CreateOrderLimitForSubscriptionValidationRequest(ValidationInputRequest): - """Request to validate create order limit for current subscription. - - All required parameters must be populated in order to send to Azure. - - :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator - :param device_type: Required. Device type to be used for the job. Possible values include: - "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type device_type: str or ~azure.mgmt.databox.models.SkuName - """ - - _validation = { - 'validation_type': {'required': True}, - 'device_type': {'required': True}, - } - - _attribute_map = { - 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'device_type': {'key': 'deviceType', 'type': 'str'}, - } - - def __init__( - self, - *, - device_type: Union[str, "SkuName"], - **kwargs - ): - super(CreateOrderLimitForSubscriptionValidationRequest, self).__init__(**kwargs) - self.validation_type = 'ValidateCreateOrderLimit' # type: str - self.device_type = device_type - - -class CreateOrderLimitForSubscriptionValidationResponseProperties(ValidationInputResponse): - """Properties of create order limit for subscription validation response. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator - :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error - :ivar status: Create order limit validation status. Possible values include: "Valid", - "Invalid", "Skipped". 
- :vartype status: str or ~azure.mgmt.databox.models.ValidationStatus - """ - - _validation = { - 'validation_type': {'required': True}, - 'error': {'readonly': True}, - 'status': {'readonly': True}, - } - - _attribute_map = { - 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, - 'status': {'key': 'status', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(CreateOrderLimitForSubscriptionValidationResponseProperties, self).__init__(**kwargs) - self.validation_type = 'ValidateCreateOrderLimit' # type: str - self.status = None - - -class DataBoxAccountCopyLogDetails(CopyLogDetails): - """Copy log details for a storage account of a DataBox job. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param copy_log_details_type: Required. Indicates the type of job details.Constant filled by - server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type copy_log_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :ivar account_name: Destination account name. - :vartype account_name: str - :ivar copy_log_link: Link for copy logs. - :vartype copy_log_link: str - """ - - _validation = { - 'copy_log_details_type': {'required': True}, - 'account_name': {'readonly': True}, - 'copy_log_link': {'readonly': True}, - } - - _attribute_map = { - 'copy_log_details_type': {'key': 'copyLogDetailsType', 'type': 'str'}, - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'copy_log_link': {'key': 'copyLogLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DataBoxAccountCopyLogDetails, self).__init__(**kwargs) - self.copy_log_details_type = 'DataBox' # type: str - self.account_name = None - self.copy_log_link = None - - -class DataBoxDiskCopyLogDetails(CopyLogDetails): - """Copy Log Details for a disk. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param copy_log_details_type: Required. Indicates the type of job details.Constant filled by - server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type copy_log_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :ivar disk_serial_number: Disk Serial Number. - :vartype disk_serial_number: str - :ivar error_log_link: Link for copy error logs. - :vartype error_log_link: str - :ivar verbose_log_link: Link for copy verbose logs. - :vartype verbose_log_link: str - """ - - _validation = { - 'copy_log_details_type': {'required': True}, - 'disk_serial_number': {'readonly': True}, - 'error_log_link': {'readonly': True}, - 'verbose_log_link': {'readonly': True}, - } - - _attribute_map = { - 'copy_log_details_type': {'key': 'copyLogDetailsType', 'type': 'str'}, - 'disk_serial_number': {'key': 'diskSerialNumber', 'type': 'str'}, - 'error_log_link': {'key': 'errorLogLink', 'type': 'str'}, - 'verbose_log_link': {'key': 'verboseLogLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DataBoxDiskCopyLogDetails, self).__init__(**kwargs) - self.copy_log_details_type = 'DataBoxDisk' # type: str - self.disk_serial_number = None - self.error_log_link = None - self.verbose_log_link = None - - -class DataBoxDiskCopyProgress(msrest.serialization.Model): - """DataBox Disk Copy Progress. 
- - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar serial_number: The serial number of the disk. - :vartype serial_number: str - :ivar bytes_copied: Bytes copied during the copy of disk. - :vartype bytes_copied: long - :ivar percent_complete: Indicates the percentage completed for the copy of the disk. - :vartype percent_complete: int - :ivar status: The Status of the copy. Possible values include: "NotStarted", "InProgress", - "Completed", "CompletedWithErrors", "Failed", "NotReturned", "HardwareError", - "DeviceFormatted", "DeviceMetadataModified", "StorageAccountNotAccessible", "UnsupportedData". - :vartype status: str or ~azure.mgmt.databox.models.CopyStatus - """ - - _validation = { - 'serial_number': {'readonly': True}, - 'bytes_copied': {'readonly': True}, - 'percent_complete': {'readonly': True}, - 'status': {'readonly': True}, - } - - _attribute_map = { - 'serial_number': {'key': 'serialNumber', 'type': 'str'}, - 'bytes_copied': {'key': 'bytesCopied', 'type': 'long'}, - 'percent_complete': {'key': 'percentComplete', 'type': 'int'}, - 'status': {'key': 'status', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DataBoxDiskCopyProgress, self).__init__(**kwargs) - self.serial_number = None - self.bytes_copied = None - self.percent_complete = None - self.status = None - - -class JobDetails(msrest.serialization.Model): - """Job details. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DataBoxJobDetails, DataBoxDiskJobDetails, DataBoxHeavyJobDetails. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param expected_data_size_in_terabytes: The expected size of the data, which needs to be - transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int - :ivar job_stages: List of stages that run in the job. - :vartype job_stages: list[~azure.mgmt.databox.models.JobStages] - :param contact_details: Required. Contact details for notification and shipping. - :type contact_details: ~azure.mgmt.databox.models.ContactDetails - :param shipping_address: Required. Shipping address of the customer. - :type shipping_address: ~azure.mgmt.databox.models.ShippingAddress - :ivar delivery_package: Delivery package shipping details. - :vartype delivery_package: ~azure.mgmt.databox.models.PackageShippingDetails - :ivar return_package: Return package shipping details. - :vartype return_package: ~azure.mgmt.databox.models.PackageShippingDetails - :param destination_account_details: Required. Destination account details. - :type destination_account_details: list[~azure.mgmt.databox.models.DestinationAccountDetails] - :ivar error_details: Error details for failure. This is optional. - :vartype error_details: list[~azure.mgmt.databox.models.JobErrorDetails] - :param job_details_type: Required. Indicates the type of job details.Constant filled by server. - Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :param preferences: Preferences for the order. - :type preferences: ~azure.mgmt.databox.models.Preferences - :ivar copy_log_details: List of copy log details. - :vartype copy_log_details: list[~azure.mgmt.databox.models.CopyLogDetails] - :ivar reverse_shipment_label_sas_key: Shared access key to download the return shipment label. 
- :vartype reverse_shipment_label_sas_key: str - :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. - :vartype chain_of_custody_sas_key: str - """ - - _validation = { - 'job_stages': {'readonly': True}, - 'contact_details': {'required': True}, - 'shipping_address': {'required': True}, - 'delivery_package': {'readonly': True}, - 'return_package': {'readonly': True}, - 'destination_account_details': {'required': True}, - 'error_details': {'readonly': True}, - 'job_details_type': {'required': True}, - 'copy_log_details': {'readonly': True}, - 'reverse_shipment_label_sas_key': {'readonly': True}, - 'chain_of_custody_sas_key': {'readonly': True}, - } - - _attribute_map = { - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, - 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, - 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, - 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, - 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, - 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, - 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, - 'preferences': {'key': 'preferences', 'type': 'Preferences'}, - 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, - 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, - 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, - } - - _subtype_map = { - 'job_details_type': {'DataBox': 'DataBoxJobDetails', 'DataBoxDisk': 'DataBoxDiskJobDetails', 'DataBoxHeavy': 'DataBoxHeavyJobDetails'} - } - - def __init__( - self, - *, - contact_details: "ContactDetails", - shipping_address: "ShippingAddress", - destination_account_details: List["DestinationAccountDetails"], - expected_data_size_in_terabytes: Optional[int] = None, - preferences: Optional["Preferences"] = None, - **kwargs - ): - super(JobDetails, self).__init__(**kwargs) - self.expected_data_size_in_terabytes = expected_data_size_in_terabytes - self.job_stages = None - self.contact_details = contact_details - self.shipping_address = shipping_address - self.delivery_package = None - self.return_package = None - self.destination_account_details = destination_account_details - self.error_details = None - self.job_details_type = None # type: Optional[str] - self.preferences = preferences - self.copy_log_details = None - self.reverse_shipment_label_sas_key = None - self.chain_of_custody_sas_key = None - - -class DataBoxDiskJobDetails(JobDetails): - """DataBox Disk Job Details. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param expected_data_size_in_terabytes: The expected size of the data, which needs to be - transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int - :ivar job_stages: List of stages that run in the job. - :vartype job_stages: list[~azure.mgmt.databox.models.JobStages] - :param contact_details: Required. Contact details for notification and shipping. - :type contact_details: ~azure.mgmt.databox.models.ContactDetails - :param shipping_address: Required. Shipping address of the customer. 
- :type shipping_address: ~azure.mgmt.databox.models.ShippingAddress - :ivar delivery_package: Delivery package shipping details. - :vartype delivery_package: ~azure.mgmt.databox.models.PackageShippingDetails - :ivar return_package: Return package shipping details. - :vartype return_package: ~azure.mgmt.databox.models.PackageShippingDetails - :param destination_account_details: Required. Destination account details. - :type destination_account_details: list[~azure.mgmt.databox.models.DestinationAccountDetails] - :ivar error_details: Error details for failure. This is optional. - :vartype error_details: list[~azure.mgmt.databox.models.JobErrorDetails] - :param job_details_type: Required. Indicates the type of job details.Constant filled by server. - Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :param preferences: Preferences for the order. - :type preferences: ~azure.mgmt.databox.models.Preferences - :ivar copy_log_details: List of copy log details. - :vartype copy_log_details: list[~azure.mgmt.databox.models.CopyLogDetails] - :ivar reverse_shipment_label_sas_key: Shared access key to download the return shipment label. - :vartype reverse_shipment_label_sas_key: str - :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. - :vartype chain_of_custody_sas_key: str - :param preferred_disks: User preference on what size disks are needed for the job. The map is - from the disk size in TB to the count. Eg. {2,5} means 5 disks of 2 TB size. Key is string but - will be checked against an int. - :type preferred_disks: dict[str, int] - :ivar copy_progress: Copy progress per disk. - :vartype copy_progress: list[~azure.mgmt.databox.models.DataBoxDiskCopyProgress] - :ivar disks_and_size_details: Contains the map of disk serial number to the disk size being - used for the job. Is returned only after the disks are shipped to the customer. - :vartype disks_and_size_details: dict[str, int] - :param passkey: User entered passkey for DataBox Disk job. 
- :type passkey: str - """ - - _validation = { - 'job_stages': {'readonly': True}, - 'contact_details': {'required': True}, - 'shipping_address': {'required': True}, - 'delivery_package': {'readonly': True}, - 'return_package': {'readonly': True}, - 'destination_account_details': {'required': True}, - 'error_details': {'readonly': True}, - 'job_details_type': {'required': True}, - 'copy_log_details': {'readonly': True}, - 'reverse_shipment_label_sas_key': {'readonly': True}, - 'chain_of_custody_sas_key': {'readonly': True}, - 'copy_progress': {'readonly': True}, - 'disks_and_size_details': {'readonly': True}, - } - - _attribute_map = { - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, - 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, - 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, - 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, - 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, - 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, - 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, - 'preferences': {'key': 'preferences', 'type': 'Preferences'}, - 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, - 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, - 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, - 'preferred_disks': {'key': 'preferredDisks', 'type': '{int}'}, - 'copy_progress': {'key': 'copyProgress', 'type': '[DataBoxDiskCopyProgress]'}, - 'disks_and_size_details': {'key': 'disksAndSizeDetails', 'type': '{int}'}, - 'passkey': {'key': 'passkey', 'type': 'str'}, - } - - def __init__( - self, - *, - contact_details: "ContactDetails", - shipping_address: "ShippingAddress", - destination_account_details: List["DestinationAccountDetails"], - expected_data_size_in_terabytes: Optional[int] = None, - preferences: Optional["Preferences"] = None, - preferred_disks: Optional[Dict[str, int]] = None, - passkey: Optional[str] = None, - **kwargs - ): - super(DataBoxDiskJobDetails, self).__init__(expected_data_size_in_terabytes=expected_data_size_in_terabytes, contact_details=contact_details, shipping_address=shipping_address, destination_account_details=destination_account_details, preferences=preferences, **kwargs) - self.job_details_type = 'DataBoxDisk' # type: str - self.preferred_disks = preferred_disks - self.copy_progress = None - self.disks_and_size_details = None - self.passkey = passkey - - -class JobSecrets(msrest.serialization.Model): - """The base class for the secrets. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DataboxJobSecrets, DataBoxDiskJobSecrets, DataBoxHeavyJobSecrets. - - All required parameters must be populated in order to send to Azure. - - :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant - filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_secrets_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :param dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. 
- :type dc_access_security_code: ~azure.mgmt.databox.models.DcAccessSecurityCode - """ - - _validation = { - 'job_secrets_type': {'required': True}, - } - - _attribute_map = { - 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, - 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, - } - - _subtype_map = { - 'job_secrets_type': {'DataBox': 'DataboxJobSecrets', 'DataBoxDisk': 'DataBoxDiskJobSecrets', 'DataBoxHeavy': 'DataBoxHeavyJobSecrets'} - } - - def __init__( - self, - *, - dc_access_security_code: Optional["DcAccessSecurityCode"] = None, - **kwargs - ): - super(JobSecrets, self).__init__(**kwargs) - self.job_secrets_type = None # type: Optional[str] - self.dc_access_security_code = dc_access_security_code - - -class DataBoxDiskJobSecrets(JobSecrets): - """The secrets related to disk job. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant - filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_secrets_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :param dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. - :type dc_access_security_code: ~azure.mgmt.databox.models.DcAccessSecurityCode - :ivar disk_secrets: Contains the list of secrets object for that device. - :vartype disk_secrets: list[~azure.mgmt.databox.models.DiskSecret] - :ivar pass_key: PassKey for the disk Job. - :vartype pass_key: str - :ivar is_passkey_user_defined: Whether passkey was provided by user. - :vartype is_passkey_user_defined: bool - """ - - _validation = { - 'job_secrets_type': {'required': True}, - 'disk_secrets': {'readonly': True}, - 'pass_key': {'readonly': True}, - 'is_passkey_user_defined': {'readonly': True}, - } - - _attribute_map = { - 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, - 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, - 'disk_secrets': {'key': 'diskSecrets', 'type': '[DiskSecret]'}, - 'pass_key': {'key': 'passKey', 'type': 'str'}, - 'is_passkey_user_defined': {'key': 'isPasskeyUserDefined', 'type': 'bool'}, - } - - def __init__( - self, - *, - dc_access_security_code: Optional["DcAccessSecurityCode"] = None, - **kwargs - ): - super(DataBoxDiskJobSecrets, self).__init__(dc_access_security_code=dc_access_security_code, **kwargs) - self.job_secrets_type = 'DataBoxDisk' # type: str - self.disk_secrets = None - self.pass_key = None - self.is_passkey_user_defined = None - - -class DataBoxHeavyAccountCopyLogDetails(CopyLogDetails): - """Copy log details for a storage account for Databox heavy. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param copy_log_details_type: Required. Indicates the type of job details.Constant filled by - server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type copy_log_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :ivar account_name: Destination account name. - :vartype account_name: str - :ivar copy_log_link: Link for copy logs. 
- :vartype copy_log_link: list[str] - """ - - _validation = { - 'copy_log_details_type': {'required': True}, - 'account_name': {'readonly': True}, - 'copy_log_link': {'readonly': True}, - } - - _attribute_map = { - 'copy_log_details_type': {'key': 'copyLogDetailsType', 'type': 'str'}, - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'copy_log_link': {'key': 'copyLogLink', 'type': '[str]'}, - } - - def __init__( - self, - **kwargs - ): - super(DataBoxHeavyAccountCopyLogDetails, self).__init__(**kwargs) - self.copy_log_details_type = 'DataBoxHeavy' # type: str - self.account_name = None - self.copy_log_link = None - - -class DataBoxHeavyJobDetails(JobDetails): - """Databox Heavy Device Job Details. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param expected_data_size_in_terabytes: The expected size of the data, which needs to be - transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int - :ivar job_stages: List of stages that run in the job. - :vartype job_stages: list[~azure.mgmt.databox.models.JobStages] - :param contact_details: Required. Contact details for notification and shipping. - :type contact_details: ~azure.mgmt.databox.models.ContactDetails - :param shipping_address: Required. Shipping address of the customer. - :type shipping_address: ~azure.mgmt.databox.models.ShippingAddress - :ivar delivery_package: Delivery package shipping details. - :vartype delivery_package: ~azure.mgmt.databox.models.PackageShippingDetails - :ivar return_package: Return package shipping details. - :vartype return_package: ~azure.mgmt.databox.models.PackageShippingDetails - :param destination_account_details: Required. Destination account details. - :type destination_account_details: list[~azure.mgmt.databox.models.DestinationAccountDetails] - :ivar error_details: Error details for failure. This is optional. - :vartype error_details: list[~azure.mgmt.databox.models.JobErrorDetails] - :param job_details_type: Required. Indicates the type of job details.Constant filled by server. - Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :param preferences: Preferences for the order. - :type preferences: ~azure.mgmt.databox.models.Preferences - :ivar copy_log_details: List of copy log details. - :vartype copy_log_details: list[~azure.mgmt.databox.models.CopyLogDetails] - :ivar reverse_shipment_label_sas_key: Shared access key to download the return shipment label. - :vartype reverse_shipment_label_sas_key: str - :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. - :vartype chain_of_custody_sas_key: str - :ivar copy_progress: Copy progress per account. - :vartype copy_progress: list[~azure.mgmt.databox.models.CopyProgress] - :param device_password: Set Device password for unlocking Databox Heavy. 
- :type device_password: str - """ - - _validation = { - 'job_stages': {'readonly': True}, - 'contact_details': {'required': True}, - 'shipping_address': {'required': True}, - 'delivery_package': {'readonly': True}, - 'return_package': {'readonly': True}, - 'destination_account_details': {'required': True}, - 'error_details': {'readonly': True}, - 'job_details_type': {'required': True}, - 'copy_log_details': {'readonly': True}, - 'reverse_shipment_label_sas_key': {'readonly': True}, - 'chain_of_custody_sas_key': {'readonly': True}, - 'copy_progress': {'readonly': True}, - } - - _attribute_map = { - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, - 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, - 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, - 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, - 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, - 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, - 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, - 'preferences': {'key': 'preferences', 'type': 'Preferences'}, - 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, - 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, - 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, - 'copy_progress': {'key': 'copyProgress', 'type': '[CopyProgress]'}, - 'device_password': {'key': 'devicePassword', 'type': 'str'}, - } - - def __init__( - self, - *, - contact_details: "ContactDetails", - shipping_address: "ShippingAddress", - destination_account_details: List["DestinationAccountDetails"], - expected_data_size_in_terabytes: Optional[int] = None, - preferences: Optional["Preferences"] = None, - device_password: Optional[str] = None, - **kwargs - ): - super(DataBoxHeavyJobDetails, self).__init__(expected_data_size_in_terabytes=expected_data_size_in_terabytes, contact_details=contact_details, shipping_address=shipping_address, destination_account_details=destination_account_details, preferences=preferences, **kwargs) - self.job_details_type = 'DataBoxHeavy' # type: str - self.copy_progress = None - self.device_password = device_password - - -class DataBoxHeavyJobSecrets(JobSecrets): - """The secrets related to a databox heavy job. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant - filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_secrets_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :param dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. - :type dc_access_security_code: ~azure.mgmt.databox.models.DcAccessSecurityCode - :ivar cabinet_pod_secrets: Contains the list of secret objects for a databox heavy job. 
- :vartype cabinet_pod_secrets: list[~azure.mgmt.databox.models.DataBoxHeavySecret] - """ - - _validation = { - 'job_secrets_type': {'required': True}, - 'cabinet_pod_secrets': {'readonly': True}, - } - - _attribute_map = { - 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, - 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, - 'cabinet_pod_secrets': {'key': 'cabinetPodSecrets', 'type': '[DataBoxHeavySecret]'}, - } - - def __init__( - self, - *, - dc_access_security_code: Optional["DcAccessSecurityCode"] = None, - **kwargs - ): - super(DataBoxHeavyJobSecrets, self).__init__(dc_access_security_code=dc_access_security_code, **kwargs) - self.job_secrets_type = 'DataBoxHeavy' # type: str - self.cabinet_pod_secrets = None - - -class DataBoxHeavySecret(msrest.serialization.Model): - """The secrets related to a databox heavy. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar device_serial_number: Serial number of the assigned device. - :vartype device_serial_number: str - :ivar device_password: Password for out of the box experience on device. - :vartype device_password: str - :ivar network_configurations: Network configuration of the appliance. - :vartype network_configurations: list[~azure.mgmt.databox.models.ApplianceNetworkConfiguration] - :ivar encoded_validation_cert_pub_key: The base 64 encoded public key to authenticate with the - device. - :vartype encoded_validation_cert_pub_key: str - :ivar account_credential_details: Per account level access credentials. - :vartype account_credential_details: list[~azure.mgmt.databox.models.AccountCredentialDetails] - """ - - _validation = { - 'device_serial_number': {'readonly': True}, - 'device_password': {'readonly': True}, - 'network_configurations': {'readonly': True}, - 'encoded_validation_cert_pub_key': {'readonly': True}, - 'account_credential_details': {'readonly': True}, - } - - _attribute_map = { - 'device_serial_number': {'key': 'deviceSerialNumber', 'type': 'str'}, - 'device_password': {'key': 'devicePassword', 'type': 'str'}, - 'network_configurations': {'key': 'networkConfigurations', 'type': '[ApplianceNetworkConfiguration]'}, - 'encoded_validation_cert_pub_key': {'key': 'encodedValidationCertPubKey', 'type': 'str'}, - 'account_credential_details': {'key': 'accountCredentialDetails', 'type': '[AccountCredentialDetails]'}, - } - - def __init__( - self, - **kwargs - ): - super(DataBoxHeavySecret, self).__init__(**kwargs) - self.device_serial_number = None - self.device_password = None - self.network_configurations = None - self.encoded_validation_cert_pub_key = None - self.account_credential_details = None - - -class DataBoxJobDetails(JobDetails): - """Databox Job Details. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param expected_data_size_in_terabytes: The expected size of the data, which needs to be - transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int - :ivar job_stages: List of stages that run in the job. - :vartype job_stages: list[~azure.mgmt.databox.models.JobStages] - :param contact_details: Required. Contact details for notification and shipping. - :type contact_details: ~azure.mgmt.databox.models.ContactDetails - :param shipping_address: Required. Shipping address of the customer. 
- :type shipping_address: ~azure.mgmt.databox.models.ShippingAddress - :ivar delivery_package: Delivery package shipping details. - :vartype delivery_package: ~azure.mgmt.databox.models.PackageShippingDetails - :ivar return_package: Return package shipping details. - :vartype return_package: ~azure.mgmt.databox.models.PackageShippingDetails - :param destination_account_details: Required. Destination account details. - :type destination_account_details: list[~azure.mgmt.databox.models.DestinationAccountDetails] - :ivar error_details: Error details for failure. This is optional. - :vartype error_details: list[~azure.mgmt.databox.models.JobErrorDetails] - :param job_details_type: Required. Indicates the type of job details.Constant filled by server. - Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_details_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :param preferences: Preferences for the order. - :type preferences: ~azure.mgmt.databox.models.Preferences - :ivar copy_log_details: List of copy log details. - :vartype copy_log_details: list[~azure.mgmt.databox.models.CopyLogDetails] - :ivar reverse_shipment_label_sas_key: Shared access key to download the return shipment label. - :vartype reverse_shipment_label_sas_key: str - :ivar chain_of_custody_sas_key: Shared access key to download the chain of custody logs. - :vartype chain_of_custody_sas_key: str - :ivar copy_progress: Copy progress per storage account. - :vartype copy_progress: list[~azure.mgmt.databox.models.CopyProgress] - :param device_password: Set Device password for unlocking Databox. - :type device_password: str - """ - - _validation = { - 'job_stages': {'readonly': True}, - 'contact_details': {'required': True}, - 'shipping_address': {'required': True}, - 'delivery_package': {'readonly': True}, - 'return_package': {'readonly': True}, - 'destination_account_details': {'required': True}, - 'error_details': {'readonly': True}, - 'job_details_type': {'required': True}, - 'copy_log_details': {'readonly': True}, - 'reverse_shipment_label_sas_key': {'readonly': True}, - 'chain_of_custody_sas_key': {'readonly': True}, - 'copy_progress': {'readonly': True}, - } - - _attribute_map = { - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, - 'job_stages': {'key': 'jobStages', 'type': '[JobStages]'}, - 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, - 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, - 'delivery_package': {'key': 'deliveryPackage', 'type': 'PackageShippingDetails'}, - 'return_package': {'key': 'returnPackage', 'type': 'PackageShippingDetails'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, - 'job_details_type': {'key': 'jobDetailsType', 'type': 'str'}, - 'preferences': {'key': 'preferences', 'type': 'Preferences'}, - 'copy_log_details': {'key': 'copyLogDetails', 'type': '[CopyLogDetails]'}, - 'reverse_shipment_label_sas_key': {'key': 'reverseShipmentLabelSasKey', 'type': 'str'}, - 'chain_of_custody_sas_key': {'key': 'chainOfCustodySasKey', 'type': 'str'}, - 'copy_progress': {'key': 'copyProgress', 'type': '[CopyProgress]'}, - 'device_password': {'key': 'devicePassword', 'type': 'str'}, - } - - def __init__( - self, - *, - contact_details: "ContactDetails", - shipping_address: "ShippingAddress", - destination_account_details: 
List["DestinationAccountDetails"], - expected_data_size_in_terabytes: Optional[int] = None, - preferences: Optional["Preferences"] = None, - device_password: Optional[str] = None, - **kwargs - ): - super(DataBoxJobDetails, self).__init__(expected_data_size_in_terabytes=expected_data_size_in_terabytes, contact_details=contact_details, shipping_address=shipping_address, destination_account_details=destination_account_details, preferences=preferences, **kwargs) - self.job_details_type = 'DataBox' # type: str - self.copy_progress = None - self.device_password = device_password - - -class DataboxJobSecrets(JobSecrets): - """The secrets related to a databox job. - - All required parameters must be populated in order to send to Azure. - - :param job_secrets_type: Required. Used to indicate what type of job secrets object.Constant - filled by server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type job_secrets_type: str or ~azure.mgmt.databox.models.ClassDiscriminator - :param dc_access_security_code: Dc Access Security Code for Customer Managed Shipping. - :type dc_access_security_code: ~azure.mgmt.databox.models.DcAccessSecurityCode - :param pod_secrets: Contains the list of secret objects for a job. - :type pod_secrets: list[~azure.mgmt.databox.models.DataBoxSecret] - """ - - _validation = { - 'job_secrets_type': {'required': True}, - } - - _attribute_map = { - 'job_secrets_type': {'key': 'jobSecretsType', 'type': 'str'}, - 'dc_access_security_code': {'key': 'dcAccessSecurityCode', 'type': 'DcAccessSecurityCode'}, - 'pod_secrets': {'key': 'podSecrets', 'type': '[DataBoxSecret]'}, - } - - def __init__( - self, - *, - dc_access_security_code: Optional["DcAccessSecurityCode"] = None, - pod_secrets: Optional[List["DataBoxSecret"]] = None, - **kwargs - ): - super(DataboxJobSecrets, self).__init__(dc_access_security_code=dc_access_security_code, **kwargs) - self.job_secrets_type = 'DataBox' # type: str - self.pod_secrets = pod_secrets - - -class ScheduleAvailabilityRequest(msrest.serialization.Model): - """Request body to get the availability for scheduling orders. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DataBoxScheduleAvailabilityRequest, DiskScheduleAvailabilityRequest, HeavyScheduleAvailabilityRequest. - - All required parameters must be populated in order to send to Azure. - - :param storage_location: Required. Location for data transfer. - For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api- - version=2018-01-01. - :type storage_location: str - :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by - server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". 
- :type sku_name: str or ~azure.mgmt.databox.models.SkuName
- """
-
- _validation = {
- 'storage_location': {'required': True},
- 'sku_name': {'required': True},
- }
-
- _attribute_map = {
- 'storage_location': {'key': 'storageLocation', 'type': 'str'},
- 'sku_name': {'key': 'skuName', 'type': 'str'},
- }
-
- _subtype_map = {
- 'sku_name': {'DataBox': 'DataBoxScheduleAvailabilityRequest', 'DataBoxDisk': 'DiskScheduleAvailabilityRequest', 'DataBoxHeavy': 'HeavyScheduleAvailabilityRequest'}
- }
-
- def __init__(
- self,
- *,
- storage_location: str,
- **kwargs
- ):
- super(ScheduleAvailabilityRequest, self).__init__(**kwargs)
- self.storage_location = storage_location
- self.sku_name = None # type: Optional[str]
-
-
-class DataBoxScheduleAvailabilityRequest(ScheduleAvailabilityRequest):
- """Request body to get the availability for scheduling data box orders.
-
- All required parameters must be populated in order to send to Azure.
-
- :param storage_location: Required. Location for data transfer.
- For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-
- version=2018-01-01.
- :type storage_location: str
- :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by
- server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy".
- :type sku_name: str or ~azure.mgmt.databox.models.SkuName
- """
-
- _validation = {
- 'storage_location': {'required': True},
- 'sku_name': {'required': True},
- }
-
- _attribute_map = {
- 'storage_location': {'key': 'storageLocation', 'type': 'str'},
- 'sku_name': {'key': 'skuName', 'type': 'str'},
- }
-
- def __init__(
- self,
- *,
- storage_location: str,
- **kwargs
- ):
- super(DataBoxScheduleAvailabilityRequest, self).__init__(storage_location=storage_location, **kwargs)
- self.sku_name = 'DataBox' # type: str
-
-
-class DataBoxSecret(msrest.serialization.Model):
- """The secrets related to a DataBox.
-
- Variables are only populated by the server, and will be ignored when sending a request.
-
- :ivar device_serial_number: Serial number of the assigned device.
- :vartype device_serial_number: str
- :ivar device_password: Password for out of the box experience on device.
- :vartype device_password: str
- :ivar network_configurations: Network configuration of the appliance.
- :vartype network_configurations: list[~azure.mgmt.databox.models.ApplianceNetworkConfiguration]
- :ivar encoded_validation_cert_pub_key: The base 64 encoded public key to authenticate with the
- device.
- :vartype encoded_validation_cert_pub_key: str
- :ivar account_credential_details: Per account level access credentials.
- :vartype account_credential_details: list[~azure.mgmt.databox.models.AccountCredentialDetails] - """ - - _validation = { - 'device_serial_number': {'readonly': True}, - 'device_password': {'readonly': True}, - 'network_configurations': {'readonly': True}, - 'encoded_validation_cert_pub_key': {'readonly': True}, - 'account_credential_details': {'readonly': True}, - } - - _attribute_map = { - 'device_serial_number': {'key': 'deviceSerialNumber', 'type': 'str'}, - 'device_password': {'key': 'devicePassword', 'type': 'str'}, - 'network_configurations': {'key': 'networkConfigurations', 'type': '[ApplianceNetworkConfiguration]'}, - 'encoded_validation_cert_pub_key': {'key': 'encodedValidationCertPubKey', 'type': 'str'}, - 'account_credential_details': {'key': 'accountCredentialDetails', 'type': '[AccountCredentialDetails]'}, - } - - def __init__( - self, - **kwargs - ): - super(DataBoxSecret, self).__init__(**kwargs) - self.device_serial_number = None - self.device_password = None - self.network_configurations = None - self.encoded_validation_cert_pub_key = None - self.account_credential_details = None - - -class DataDestinationDetailsValidationRequest(ValidationInputRequest): - """Request to validate data destination details. - - All required parameters must be populated in order to send to Azure. - - :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator - :param destination_account_details: Required. Destination account details list. - :type destination_account_details: list[~azure.mgmt.databox.models.DestinationAccountDetails] - :param location: Required. Location of stamp or geo. - :type location: str - """ - - _validation = { - 'validation_type': {'required': True}, - 'destination_account_details': {'required': True}, - 'location': {'required': True}, - } - - _attribute_map = { - 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'destination_account_details': {'key': 'destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - 'location': {'key': 'location', 'type': 'str'}, - } - - def __init__( - self, - *, - destination_account_details: List["DestinationAccountDetails"], - location: str, - **kwargs - ): - super(DataDestinationDetailsValidationRequest, self).__init__(**kwargs) - self.validation_type = 'ValidateDataDestinationDetails' # type: str - self.destination_account_details = destination_account_details - self.location = location - - -class DataDestinationDetailsValidationResponseProperties(ValidationInputResponse): - """Properties of data destination details validation response. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator - :ivar error: Error code and message of validation response. 
- :vartype error: ~azure.mgmt.databox.models.Error - :ivar status: Data destination details validation status. Possible values include: "Valid", - "Invalid", "Skipped". - :vartype status: str or ~azure.mgmt.databox.models.ValidationStatus - """ - - _validation = { - 'validation_type': {'required': True}, - 'error': {'readonly': True}, - 'status': {'readonly': True}, - } - - _attribute_map = { - 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, - 'status': {'key': 'status', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DataDestinationDetailsValidationResponseProperties, self).__init__(**kwargs) - self.validation_type = 'ValidateDataDestinationDetails' # type: str - self.status = None - - -class DcAccessSecurityCode(msrest.serialization.Model): - """Dc Access Security code for device. - - :param forward_dc_access_code: Dc Access Code for dispatching from DC. - :type forward_dc_access_code: str - :param reverse_dc_access_code: Dc Access code for dropping off at DC. - :type reverse_dc_access_code: str - """ - - _attribute_map = { - 'forward_dc_access_code': {'key': 'forwardDcAccessCode', 'type': 'str'}, - 'reverse_dc_access_code': {'key': 'reverseDcAccessCode', 'type': 'str'}, - } - - def __init__( - self, - *, - forward_dc_access_code: Optional[str] = None, - reverse_dc_access_code: Optional[str] = None, - **kwargs - ): - super(DcAccessSecurityCode, self).__init__(**kwargs) - self.forward_dc_access_code = forward_dc_access_code - self.reverse_dc_access_code = reverse_dc_access_code - - -class DestinationAccountDetails(msrest.serialization.Model): - """Details of the destination storage accounts. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DestinationManagedDiskDetails, DestinationStorageAccountDetails. - - All required parameters must be populated in order to send to Azure. - - :param data_destination_type: Required. Data Destination Type.Constant filled by server. - Possible values include: "StorageAccount", "ManagedDisk". - :type data_destination_type: str or ~azure.mgmt.databox.models.DataDestinationType - :param account_id: Arm Id of the destination where the data has to be moved. - :type account_id: str - :param share_password: Share password to be shared by all shares in SA. - :type share_password: str - """ - - _validation = { - 'data_destination_type': {'required': True}, - } - - _attribute_map = { - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, - 'account_id': {'key': 'accountId', 'type': 'str'}, - 'share_password': {'key': 'sharePassword', 'type': 'str'}, - } - - _subtype_map = { - 'data_destination_type': {'ManagedDisk': 'DestinationManagedDiskDetails', 'StorageAccount': 'DestinationStorageAccountDetails'} - } - - def __init__( - self, - *, - account_id: Optional[str] = None, - share_password: Optional[str] = None, - **kwargs - ): - super(DestinationAccountDetails, self).__init__(**kwargs) - self.data_destination_type = None # type: Optional[str] - self.account_id = account_id - self.share_password = share_password - - -class DestinationManagedDiskDetails(DestinationAccountDetails): - """Details for the destination compute disks. - - All required parameters must be populated in order to send to Azure. - - :param data_destination_type: Required. Data Destination Type.Constant filled by server. - Possible values include: "StorageAccount", "ManagedDisk". 
- :type data_destination_type: str or ~azure.mgmt.databox.models.DataDestinationType - :param account_id: Arm Id of the destination where the data has to be moved. - :type account_id: str - :param share_password: Share password to be shared by all shares in SA. - :type share_password: str - :param resource_group_id: Required. Destination Resource Group Id where the Compute disks - should be created. - :type resource_group_id: str - :param staging_storage_account_id: Required. Arm Id of the storage account that can be used to - copy the vhd for staging. - :type staging_storage_account_id: str - """ - - _validation = { - 'data_destination_type': {'required': True}, - 'resource_group_id': {'required': True}, - 'staging_storage_account_id': {'required': True}, - } - - _attribute_map = { - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, - 'account_id': {'key': 'accountId', 'type': 'str'}, - 'share_password': {'key': 'sharePassword', 'type': 'str'}, - 'resource_group_id': {'key': 'resourceGroupId', 'type': 'str'}, - 'staging_storage_account_id': {'key': 'stagingStorageAccountId', 'type': 'str'}, - } - - def __init__( - self, - *, - resource_group_id: str, - staging_storage_account_id: str, - account_id: Optional[str] = None, - share_password: Optional[str] = None, - **kwargs - ): - super(DestinationManagedDiskDetails, self).__init__(account_id=account_id, share_password=share_password, **kwargs) - self.data_destination_type = 'ManagedDisk' # type: str - self.resource_group_id = resource_group_id - self.staging_storage_account_id = staging_storage_account_id - - -class DestinationStorageAccountDetails(DestinationAccountDetails): - """Details for the destination storage account. - - All required parameters must be populated in order to send to Azure. - - :param data_destination_type: Required. Data Destination Type.Constant filled by server. - Possible values include: "StorageAccount", "ManagedDisk". - :type data_destination_type: str or ~azure.mgmt.databox.models.DataDestinationType - :param account_id: Arm Id of the destination where the data has to be moved. - :type account_id: str - :param share_password: Share password to be shared by all shares in SA. - :type share_password: str - :param storage_account_id: Required. Destination Storage Account Arm Id. - :type storage_account_id: str - """ - - _validation = { - 'data_destination_type': {'required': True}, - 'storage_account_id': {'required': True}, - } - - _attribute_map = { - 'data_destination_type': {'key': 'dataDestinationType', 'type': 'str'}, - 'account_id': {'key': 'accountId', 'type': 'str'}, - 'share_password': {'key': 'sharePassword', 'type': 'str'}, - 'storage_account_id': {'key': 'storageAccountId', 'type': 'str'}, - } - - def __init__( - self, - *, - storage_account_id: str, - account_id: Optional[str] = None, - share_password: Optional[str] = None, - **kwargs - ): - super(DestinationStorageAccountDetails, self).__init__(account_id=account_id, share_password=share_password, **kwargs) - self.data_destination_type = 'StorageAccount' # type: str - self.storage_account_id = storage_account_id - - -class DestinationToServiceLocationMap(msrest.serialization.Model): - """Map of destination location to service location. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar destination_location: Location of the destination. - :vartype destination_location: str - :ivar service_location: Location of the service. 
- :vartype service_location: str - """ - - _validation = { - 'destination_location': {'readonly': True}, - 'service_location': {'readonly': True}, - } - - _attribute_map = { - 'destination_location': {'key': 'destinationLocation', 'type': 'str'}, - 'service_location': {'key': 'serviceLocation', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DestinationToServiceLocationMap, self).__init__(**kwargs) - self.destination_location = None - self.service_location = None - - -class DiskScheduleAvailabilityRequest(ScheduleAvailabilityRequest): - """Request body to get the availability for scheduling disk orders. - - All required parameters must be populated in order to send to Azure. - - :param storage_location: Required. Location for data transfer. - For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api- - version=2018-01-01. - :type storage_location: str - :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by - server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type sku_name: str or ~azure.mgmt.databox.models.SkuName - :param expected_data_size_in_terabytes: Required. The expected size of the data, which needs to - be transferred in this job, in terabytes. - :type expected_data_size_in_terabytes: int - """ - - _validation = { - 'storage_location': {'required': True}, - 'sku_name': {'required': True}, - 'expected_data_size_in_terabytes': {'required': True}, - } - - _attribute_map = { - 'storage_location': {'key': 'storageLocation', 'type': 'str'}, - 'sku_name': {'key': 'skuName', 'type': 'str'}, - 'expected_data_size_in_terabytes': {'key': 'expectedDataSizeInTerabytes', 'type': 'int'}, - } - - def __init__( - self, - *, - storage_location: str, - expected_data_size_in_terabytes: int, - **kwargs - ): - super(DiskScheduleAvailabilityRequest, self).__init__(storage_location=storage_location, **kwargs) - self.sku_name = 'DataBoxDisk' # type: str - self.expected_data_size_in_terabytes = expected_data_size_in_terabytes - - -class DiskSecret(msrest.serialization.Model): - """Contains all the secrets of a Disk. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar disk_serial_number: Serial number of the assigned disk. - :vartype disk_serial_number: str - :ivar bit_locker_key: Bit Locker key of the disk which can be used to unlock the disk to copy - data. - :vartype bit_locker_key: str - """ - - _validation = { - 'disk_serial_number': {'readonly': True}, - 'bit_locker_key': {'readonly': True}, - } - - _attribute_map = { - 'disk_serial_number': {'key': 'diskSerialNumber', 'type': 'str'}, - 'bit_locker_key': {'key': 'bitLockerKey', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(DiskSecret, self).__init__(**kwargs) - self.disk_serial_number = None - self.bit_locker_key = None - - -class Error(msrest.serialization.Model): - """Top level error for the job. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar code: Error code that can be used to programmatically identify the error. - :vartype code: str - :ivar message: Describes the error in detail and provides debugging information. 
- :vartype message: str - """ - - _validation = { - 'code': {'readonly': True}, - 'message': {'readonly': True}, - } - - _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(Error, self).__init__(**kwargs) - self.code = None - self.message = None - - -class HeavyScheduleAvailabilityRequest(ScheduleAvailabilityRequest): - """Request body to get the availability for scheduling heavy orders. - - All required parameters must be populated in order to send to Azure. - - :param storage_location: Required. Location for data transfer. - For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api- - version=2018-01-01. - :type storage_location: str - :param sku_name: Required. Sku Name for which the order is to be scheduled.Constant filled by - server. Possible values include: "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type sku_name: str or ~azure.mgmt.databox.models.SkuName - """ - - _validation = { - 'storage_location': {'required': True}, - 'sku_name': {'required': True}, - } - - _attribute_map = { - 'storage_location': {'key': 'storageLocation', 'type': 'str'}, - 'sku_name': {'key': 'skuName', 'type': 'str'}, - } - - def __init__( - self, - *, - storage_location: str, - **kwargs - ): - super(HeavyScheduleAvailabilityRequest, self).__init__(storage_location=storage_location, **kwargs) - self.sku_name = 'DataBoxHeavy' # type: str - - -class JobDeliveryInfo(msrest.serialization.Model): - """Additional delivery info. - - :param scheduled_date_time: Scheduled date time. - :type scheduled_date_time: ~datetime.datetime - """ - - _attribute_map = { - 'scheduled_date_time': {'key': 'scheduledDateTime', 'type': 'iso-8601'}, - } - - def __init__( - self, - *, - scheduled_date_time: Optional[datetime.datetime] = None, - **kwargs - ): - super(JobDeliveryInfo, self).__init__(**kwargs) - self.scheduled_date_time = scheduled_date_time - - -class JobErrorDetails(msrest.serialization.Model): - """Job Error Details for providing the information and recommended action. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar error_message: Message for the error. - :vartype error_message: str - :ivar error_code: Code for the error. - :vartype error_code: int - :ivar recommended_action: Recommended action for the error. - :vartype recommended_action: str - :ivar exception_message: Contains the non localized exception message. - :vartype exception_message: str - """ - - _validation = { - 'error_message': {'readonly': True}, - 'error_code': {'readonly': True}, - 'recommended_action': {'readonly': True}, - 'exception_message': {'readonly': True}, - } - - _attribute_map = { - 'error_message': {'key': 'errorMessage', 'type': 'str'}, - 'error_code': {'key': 'errorCode', 'type': 'int'}, - 'recommended_action': {'key': 'recommendedAction', 'type': 'str'}, - 'exception_message': {'key': 'exceptionMessage', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(JobErrorDetails, self).__init__(**kwargs) - self.error_message = None - self.error_code = None - self.recommended_action = None - self.exception_message = None - - -class Resource(msrest.serialization.Model): - """Model of the Resource. - - All required parameters must be populated in order to send to Azure. - - :param location: Required. The location of the resource. This will be one of the supported and - registered Azure Regions (e.g. 
West US, East US, Southeast Asia, etc.). The region of a - resource cannot be changed once it is created, but if an identical region is specified on - update the request will succeed. - :type location: str - :param tags: A set of tags. The list of key value pairs that describe the resource. These tags - can be used in viewing and grouping this resource (across resource groups). - :type tags: dict[str, str] - :param sku: Required. The sku type. - :type sku: ~azure.mgmt.databox.models.Sku - """ - - _validation = { - 'location': {'required': True}, - 'sku': {'required': True}, - } - - _attribute_map = { - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - } - - def __init__( - self, - *, - location: str, - sku: "Sku", - tags: Optional[Dict[str, str]] = None, - **kwargs - ): - super(Resource, self).__init__(**kwargs) - self.location = location - self.tags = tags - self.sku = sku - - -class JobResource(Resource): - """Job Resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param location: Required. The location of the resource. This will be one of the supported and - registered Azure Regions (e.g. West US, East US, Southeast Asia, etc.). The region of a - resource cannot be changed once it is created, but if an identical region is specified on - update the request will succeed. - :type location: str - :param tags: A set of tags. The list of key value pairs that describe the resource. These tags - can be used in viewing and grouping this resource (across resource groups). - :type tags: dict[str, str] - :param sku: Required. The sku type. - :type sku: ~azure.mgmt.databox.models.Sku - :ivar name: Name of the object. - :vartype name: str - :ivar id: Id of the object. - :vartype id: str - :ivar type: Type of the object. - :vartype type: str - :ivar is_cancellable: Describes whether the job is cancellable or not. - :vartype is_cancellable: bool - :ivar is_deletable: Describes whether the job is deletable or not. - :vartype is_deletable: bool - :ivar is_shipping_address_editable: Describes whether the shipping address is editable or not. - :vartype is_shipping_address_editable: bool - :ivar status: Name of the stage which is in progress. Possible values include: "DeviceOrdered", - "DevicePrepared", "Dispatched", "Delivered", "PickedUp", "AtAzureDC", "DataCopy", "Completed", - "CompletedWithErrors", "Cancelled", "Failed_IssueReportedAtCustomer", - "Failed_IssueDetectedAtAzureDC", "Aborted", "CompletedWithWarnings", - "ReadyToDispatchFromAzureDC", "ReadyToReceiveAtAzureDC". - :vartype status: str or ~azure.mgmt.databox.models.StageName - :ivar start_time: Time at which the job was started in UTC ISO 8601 format. - :vartype start_time: ~datetime.datetime - :ivar error: Top level error for the job. - :vartype error: ~azure.mgmt.databox.models.Error - :param details: Details of a job run. This field will only be sent for expand details filter. - :type details: ~azure.mgmt.databox.models.JobDetails - :ivar cancellation_reason: Reason for cancellation. - :vartype cancellation_reason: str - :param delivery_type: Delivery type of Job. Possible values include: "NonScheduled", - "Scheduled". - :type delivery_type: str or ~azure.mgmt.databox.models.JobDeliveryType - :param delivery_info: Delivery Info of Job. 
- :type delivery_info: ~azure.mgmt.databox.models.JobDeliveryInfo - :ivar is_cancellable_without_fee: Flag to indicate cancellation of scheduled job. - :vartype is_cancellable_without_fee: bool - """ - - _validation = { - 'location': {'required': True}, - 'sku': {'required': True}, - 'name': {'readonly': True}, - 'id': {'readonly': True}, - 'type': {'readonly': True}, - 'is_cancellable': {'readonly': True}, - 'is_deletable': {'readonly': True}, - 'is_shipping_address_editable': {'readonly': True}, - 'status': {'readonly': True}, - 'start_time': {'readonly': True}, - 'error': {'readonly': True}, - 'cancellation_reason': {'readonly': True}, - 'is_cancellable_without_fee': {'readonly': True}, - } - - _attribute_map = { - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'name': {'key': 'name', 'type': 'str'}, - 'id': {'key': 'id', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'is_cancellable': {'key': 'properties.isCancellable', 'type': 'bool'}, - 'is_deletable': {'key': 'properties.isDeletable', 'type': 'bool'}, - 'is_shipping_address_editable': {'key': 'properties.isShippingAddressEditable', 'type': 'bool'}, - 'status': {'key': 'properties.status', 'type': 'str'}, - 'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'}, - 'error': {'key': 'properties.error', 'type': 'Error'}, - 'details': {'key': 'properties.details', 'type': 'JobDetails'}, - 'cancellation_reason': {'key': 'properties.cancellationReason', 'type': 'str'}, - 'delivery_type': {'key': 'properties.deliveryType', 'type': 'str'}, - 'delivery_info': {'key': 'properties.deliveryInfo', 'type': 'JobDeliveryInfo'}, - 'is_cancellable_without_fee': {'key': 'properties.isCancellableWithoutFee', 'type': 'bool'}, - } - - def __init__( - self, - *, - location: str, - sku: "Sku", - tags: Optional[Dict[str, str]] = None, - details: Optional["JobDetails"] = None, - delivery_type: Optional[Union[str, "JobDeliveryType"]] = None, - delivery_info: Optional["JobDeliveryInfo"] = None, - **kwargs - ): - super(JobResource, self).__init__(location=location, tags=tags, sku=sku, **kwargs) - self.name = None - self.id = None - self.type = None - self.is_cancellable = None - self.is_deletable = None - self.is_shipping_address_editable = None - self.status = None - self.start_time = None - self.error = None - self.details = details - self.cancellation_reason = None - self.delivery_type = delivery_type - self.delivery_info = delivery_info - self.is_cancellable_without_fee = None - - -class JobResourceList(msrest.serialization.Model): - """Job Resource Collection. - - :param value: List of job resources. - :type value: list[~azure.mgmt.databox.models.JobResource] - :param next_link: Link for the next set of job resources. - :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[JobResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - *, - value: Optional[List["JobResource"]] = None, - next_link: Optional[str] = None, - **kwargs - ): - super(JobResourceList, self).__init__(**kwargs) - self.value = value - self.next_link = next_link - - -class JobResourceUpdateParameter(msrest.serialization.Model): - """The JobResourceUpdateParameter. - - :param tags: A set of tags. The list of key value pairs that describe the resource. These tags - can be used in viewing and grouping this resource (across resource groups). 
- :type tags: dict[str, str] - :param details: Details of a job to be updated. - :type details: ~azure.mgmt.databox.models.UpdateJobDetails - :param destination_account_details: Destination account details. - :type destination_account_details: list[~azure.mgmt.databox.models.DestinationAccountDetails] - """ - - _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - 'details': {'key': 'properties.details', 'type': 'UpdateJobDetails'}, - 'destination_account_details': {'key': 'properties.destinationAccountDetails', 'type': '[DestinationAccountDetails]'}, - } - - def __init__( - self, - *, - tags: Optional[Dict[str, str]] = None, - details: Optional["UpdateJobDetails"] = None, - destination_account_details: Optional[List["DestinationAccountDetails"]] = None, - **kwargs - ): - super(JobResourceUpdateParameter, self).__init__(**kwargs) - self.tags = tags - self.details = details - self.destination_account_details = destination_account_details - - -class JobStages(msrest.serialization.Model): - """Job stages. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar stage_name: Name of the job stage. Possible values include: "DeviceOrdered", - "DevicePrepared", "Dispatched", "Delivered", "PickedUp", "AtAzureDC", "DataCopy", "Completed", - "CompletedWithErrors", "Cancelled", "Failed_IssueReportedAtCustomer", - "Failed_IssueDetectedAtAzureDC", "Aborted", "CompletedWithWarnings", - "ReadyToDispatchFromAzureDC", "ReadyToReceiveAtAzureDC". - :vartype stage_name: str or ~azure.mgmt.databox.models.StageName - :ivar display_name: Display name of the job stage. - :vartype display_name: str - :ivar stage_status: Status of the job stage. Possible values include: "None", "InProgress", - "Succeeded", "Failed", "Cancelled", "Cancelling", "SucceededWithErrors". - :vartype stage_status: str or ~azure.mgmt.databox.models.StageStatus - :ivar stage_time: Time for the job stage in UTC ISO 8601 format. - :vartype stage_time: ~datetime.datetime - :ivar job_stage_details: Job Stage Details. - :vartype job_stage_details: object - :ivar error_details: Error details for the stage. - :vartype error_details: list[~azure.mgmt.databox.models.JobErrorDetails] - """ - - _validation = { - 'stage_name': {'readonly': True}, - 'display_name': {'readonly': True}, - 'stage_status': {'readonly': True}, - 'stage_time': {'readonly': True}, - 'job_stage_details': {'readonly': True}, - 'error_details': {'readonly': True}, - } - - _attribute_map = { - 'stage_name': {'key': 'stageName', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'stage_status': {'key': 'stageStatus', 'type': 'str'}, - 'stage_time': {'key': 'stageTime', 'type': 'iso-8601'}, - 'job_stage_details': {'key': 'jobStageDetails', 'type': 'object'}, - 'error_details': {'key': 'errorDetails', 'type': '[JobErrorDetails]'}, - } - - def __init__( - self, - **kwargs - ): - super(JobStages, self).__init__(**kwargs) - self.stage_name = None - self.display_name = None - self.stage_status = None - self.stage_time = None - self.job_stage_details = None - self.error_details = None - - -class NotificationPreference(msrest.serialization.Model): - """Notification preference for a job stage. - - All required parameters must be populated in order to send to Azure. - - :param stage_name: Required. Name of the stage. Possible values include: "DevicePrepared", - "Dispatched", "Delivered", "PickedUp", "AtAzureDC", "DataCopy". 
- :type stage_name: str or ~azure.mgmt.databox.models.NotificationStageName - :param send_notification: Required. Notification is required or not. - :type send_notification: bool - """ - - _validation = { - 'stage_name': {'required': True}, - 'send_notification': {'required': True}, - } - - _attribute_map = { - 'stage_name': {'key': 'stageName', 'type': 'str'}, - 'send_notification': {'key': 'sendNotification', 'type': 'bool'}, - } - - def __init__( - self, - *, - stage_name: Union[str, "NotificationStageName"], - send_notification: bool, - **kwargs - ): - super(NotificationPreference, self).__init__(**kwargs) - self.stage_name = stage_name - self.send_notification = send_notification - - -class Operation(msrest.serialization.Model): - """Operation entity. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar name: Name of the operation. Format: - {resourceProviderNamespace}/{resourceType}/{read|write|delete|action}. - :vartype name: str - :ivar display: Operation display values. - :vartype display: ~azure.mgmt.databox.models.OperationDisplay - :ivar properties: Operation properties. - :vartype properties: object - :ivar origin: Origin of the operation. Can be : user|system|user,system. - :vartype origin: str - """ - - _validation = { - 'name': {'readonly': True}, - 'display': {'readonly': True}, - 'properties': {'readonly': True}, - 'origin': {'readonly': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display': {'key': 'display', 'type': 'OperationDisplay'}, - 'properties': {'key': 'properties', 'type': 'object'}, - 'origin': {'key': 'origin', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(Operation, self).__init__(**kwargs) - self.name = None - self.display = None - self.properties = None - self.origin = None - - -class OperationDisplay(msrest.serialization.Model): - """Operation display. - - :param provider: Provider name. - :type provider: str - :param resource: Resource name. - :type resource: str - :param operation: Localized name of the operation for display purpose. - :type operation: str - :param description: Localized description of the operation for display purpose. - :type description: str - """ - - _attribute_map = { - 'provider': {'key': 'provider', 'type': 'str'}, - 'resource': {'key': 'resource', 'type': 'str'}, - 'operation': {'key': 'operation', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - } - - def __init__( - self, - *, - provider: Optional[str] = None, - resource: Optional[str] = None, - operation: Optional[str] = None, - description: Optional[str] = None, - **kwargs - ): - super(OperationDisplay, self).__init__(**kwargs) - self.provider = provider - self.resource = resource - self.operation = operation - self.description = description - - -class OperationList(msrest.serialization.Model): - """Operation Collection. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: List of operations. - :vartype value: list[~azure.mgmt.databox.models.Operation] - :param next_link: Link for the next set of operations. 
- :type next_link: str - """ - - _validation = { - 'value': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[Operation]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - *, - next_link: Optional[str] = None, - **kwargs - ): - super(OperationList, self).__init__(**kwargs) - self.value = None - self.next_link = next_link - - -class PackageShippingDetails(msrest.serialization.Model): - """Shipping details. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar carrier_name: Name of the carrier. - :vartype carrier_name: str - :ivar tracking_id: Tracking Id of shipment. - :vartype tracking_id: str - :ivar tracking_url: Url where shipment can be tracked. - :vartype tracking_url: str - """ - - _validation = { - 'carrier_name': {'readonly': True}, - 'tracking_id': {'readonly': True}, - 'tracking_url': {'readonly': True}, - } - - _attribute_map = { - 'carrier_name': {'key': 'carrierName', 'type': 'str'}, - 'tracking_id': {'key': 'trackingId', 'type': 'str'}, - 'tracking_url': {'key': 'trackingUrl', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PackageShippingDetails, self).__init__(**kwargs) - self.carrier_name = None - self.tracking_id = None - self.tracking_url = None - - -class Preferences(msrest.serialization.Model): - """Preferences related to the order. - - :param preferred_data_center_region: Preferred Data Center Region. - :type preferred_data_center_region: list[str] - :param transport_preferences: Preferences related to the shipment logistics of the sku. - :type transport_preferences: ~azure.mgmt.databox.models.TransportPreferences - """ - - _attribute_map = { - 'preferred_data_center_region': {'key': 'preferredDataCenterRegion', 'type': '[str]'}, - 'transport_preferences': {'key': 'transportPreferences', 'type': 'TransportPreferences'}, - } - - def __init__( - self, - *, - preferred_data_center_region: Optional[List[str]] = None, - transport_preferences: Optional["TransportPreferences"] = None, - **kwargs - ): - super(Preferences, self).__init__(**kwargs) - self.preferred_data_center_region = preferred_data_center_region - self.transport_preferences = transport_preferences - - -class PreferencesValidationRequest(ValidationInputRequest): - """Request to validate preference of transport and data center. - - All required parameters must be populated in order to send to Azure. - - :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator - :param preference: Preference requested with respect to transport type and data center. - :type preference: ~azure.mgmt.databox.models.Preferences - :param device_type: Required. Device type to be used for the job. Possible values include: - "DataBox", "DataBoxDisk", "DataBoxHeavy". 
- :type device_type: str or ~azure.mgmt.databox.models.SkuName - """ - - _validation = { - 'validation_type': {'required': True}, - 'device_type': {'required': True}, - } - - _attribute_map = { - 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'preference': {'key': 'preference', 'type': 'Preferences'}, - 'device_type': {'key': 'deviceType', 'type': 'str'}, - } - - def __init__( - self, - *, - device_type: Union[str, "SkuName"], - preference: Optional["Preferences"] = None, - **kwargs - ): - super(PreferencesValidationRequest, self).__init__(**kwargs) - self.validation_type = 'ValidatePreferences' # type: str - self.preference = preference - self.device_type = device_type - - -class PreferencesValidationResponseProperties(ValidationInputResponse): - """Properties of data center and transport preference validation response. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator - :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error - :ivar status: Validation status of requested data center and transport. Possible values - include: "Valid", "Invalid", "Skipped". - :vartype status: str or ~azure.mgmt.databox.models.ValidationStatus - """ - - _validation = { - 'validation_type': {'required': True}, - 'error': {'readonly': True}, - 'status': {'readonly': True}, - } - - _attribute_map = { - 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, - 'status': {'key': 'status', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PreferencesValidationResponseProperties, self).__init__(**kwargs) - self.validation_type = 'ValidatePreferences' # type: str - self.status = None - - -class RegionConfigurationRequest(msrest.serialization.Model): - """Request body to get the configuration for the region. - - :param schedule_availability_request: Request body to get the availability for scheduling - orders. - :type schedule_availability_request: ~azure.mgmt.databox.models.ScheduleAvailabilityRequest - :param transport_availability_request: Request body to get the transport availability for given - sku. 
- :type transport_availability_request: ~azure.mgmt.databox.models.TransportAvailabilityRequest - """ - - _attribute_map = { - 'schedule_availability_request': {'key': 'scheduleAvailabilityRequest', 'type': 'ScheduleAvailabilityRequest'}, - 'transport_availability_request': {'key': 'transportAvailabilityRequest', 'type': 'TransportAvailabilityRequest'}, - } - - def __init__( - self, - *, - schedule_availability_request: Optional["ScheduleAvailabilityRequest"] = None, - transport_availability_request: Optional["TransportAvailabilityRequest"] = None, - **kwargs - ): - super(RegionConfigurationRequest, self).__init__(**kwargs) - self.schedule_availability_request = schedule_availability_request - self.transport_availability_request = transport_availability_request - - -class RegionConfigurationResponse(msrest.serialization.Model): - """Configuration response specific to a region. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar schedule_availability_response: Schedule availability for given sku in a region. - :vartype schedule_availability_response: - ~azure.mgmt.databox.models.ScheduleAvailabilityResponse - :ivar transport_availability_response: Transport options available for given sku in a region. - :vartype transport_availability_response: - ~azure.mgmt.databox.models.TransportAvailabilityResponse - """ - - _validation = { - 'schedule_availability_response': {'readonly': True}, - 'transport_availability_response': {'readonly': True}, - } - - _attribute_map = { - 'schedule_availability_response': {'key': 'scheduleAvailabilityResponse', 'type': 'ScheduleAvailabilityResponse'}, - 'transport_availability_response': {'key': 'transportAvailabilityResponse', 'type': 'TransportAvailabilityResponse'}, - } - - def __init__( - self, - **kwargs - ): - super(RegionConfigurationResponse, self).__init__(**kwargs) - self.schedule_availability_response = None - self.transport_availability_response = None - - -class ScheduleAvailabilityResponse(msrest.serialization.Model): - """Schedule availability response for given sku in a region. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar available_dates: List of dates available to schedule. - :vartype available_dates: list[~datetime.datetime] - """ - - _validation = { - 'available_dates': {'readonly': True}, - } - - _attribute_map = { - 'available_dates': {'key': 'availableDates', 'type': '[iso-8601]'}, - } - - def __init__( - self, - **kwargs - ): - super(ScheduleAvailabilityResponse, self).__init__(**kwargs) - self.available_dates = None - - -class ShareCredentialDetails(msrest.serialization.Model): - """Credential details of the shares in account. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar share_name: Name of the share. - :vartype share_name: str - :ivar share_type: Type of the share. Possible values include: "UnknownType", "HCS", - "BlockBlob", "PageBlob", "AzureFile", "ManagedDisk". - :vartype share_type: str or ~azure.mgmt.databox.models.ShareDestinationFormatType - :ivar user_name: User name for the share. - :vartype user_name: str - :ivar password: Password for the share. - :vartype password: str - :ivar supported_access_protocols: Access protocols supported on the device. 
- :vartype supported_access_protocols: list[str or ~azure.mgmt.databox.models.AccessProtocol] - """ - - _validation = { - 'share_name': {'readonly': True}, - 'share_type': {'readonly': True}, - 'user_name': {'readonly': True}, - 'password': {'readonly': True}, - 'supported_access_protocols': {'readonly': True}, - } - - _attribute_map = { - 'share_name': {'key': 'shareName', 'type': 'str'}, - 'share_type': {'key': 'shareType', 'type': 'str'}, - 'user_name': {'key': 'userName', 'type': 'str'}, - 'password': {'key': 'password', 'type': 'str'}, - 'supported_access_protocols': {'key': 'supportedAccessProtocols', 'type': '[str]'}, - } - - def __init__( - self, - **kwargs - ): - super(ShareCredentialDetails, self).__init__(**kwargs) - self.share_name = None - self.share_type = None - self.user_name = None - self.password = None - self.supported_access_protocols = None - - -class ShipmentPickUpRequest(msrest.serialization.Model): - """Shipment pick up request details. - - All required parameters must be populated in order to send to Azure. - - :param start_time: Required. Minimum date after which the pick up should commence, this must be - in local time of pick up area. - :type start_time: ~datetime.datetime - :param end_time: Required. Maximum date before which the pick up should commence, this must be - in local time of pick up area. - :type end_time: ~datetime.datetime - :param shipment_location: Required. Shipment Location in the pickup place. Eg.front desk. - :type shipment_location: str - """ - - _validation = { - 'start_time': {'required': True}, - 'end_time': {'required': True}, - 'shipment_location': {'required': True}, - } - - _attribute_map = { - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, - 'shipment_location': {'key': 'shipmentLocation', 'type': 'str'}, - } - - def __init__( - self, - *, - start_time: datetime.datetime, - end_time: datetime.datetime, - shipment_location: str, - **kwargs - ): - super(ShipmentPickUpRequest, self).__init__(**kwargs) - self.start_time = start_time - self.end_time = end_time - self.shipment_location = shipment_location - - -class ShipmentPickUpResponse(msrest.serialization.Model): - """Shipment pick up response. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar confirmation_number: Confirmation number for the pick up request. - :vartype confirmation_number: str - :ivar ready_by_time: Time by which shipment should be ready for pick up, this is in local time - of pick up area. - :vartype ready_by_time: ~datetime.datetime - """ - - _validation = { - 'confirmation_number': {'readonly': True}, - 'ready_by_time': {'readonly': True}, - } - - _attribute_map = { - 'confirmation_number': {'key': 'confirmationNumber', 'type': 'str'}, - 'ready_by_time': {'key': 'readyByTime', 'type': 'iso-8601'}, - } - - def __init__( - self, - **kwargs - ): - super(ShipmentPickUpResponse, self).__init__(**kwargs) - self.confirmation_number = None - self.ready_by_time = None - - -class ShippingAddress(msrest.serialization.Model): - """Shipping address where customer wishes to receive the device. - - All required parameters must be populated in order to send to Azure. - - :param street_address1: Required. Street Address line 1. - :type street_address1: str - :param street_address2: Street Address line 2. - :type street_address2: str - :param street_address3: Street Address line 3. - :type street_address3: str - :param city: Name of the City. 
- :type city: str - :param state_or_province: Name of the State or Province. - :type state_or_province: str - :param country: Required. Name of the Country. - :type country: str - :param postal_code: Required. Postal code. - :type postal_code: str - :param zip_extended_code: Extended Zip Code. - :type zip_extended_code: str - :param company_name: Name of the company. - :type company_name: str - :param address_type: Type of address. Possible values include: "None", "Residential", - "Commercial". - :type address_type: str or ~azure.mgmt.databox.models.AddressType - """ - - _validation = { - 'street_address1': {'required': True}, - 'country': {'required': True}, - 'postal_code': {'required': True}, - } - - _attribute_map = { - 'street_address1': {'key': 'streetAddress1', 'type': 'str'}, - 'street_address2': {'key': 'streetAddress2', 'type': 'str'}, - 'street_address3': {'key': 'streetAddress3', 'type': 'str'}, - 'city': {'key': 'city', 'type': 'str'}, - 'state_or_province': {'key': 'stateOrProvince', 'type': 'str'}, - 'country': {'key': 'country', 'type': 'str'}, - 'postal_code': {'key': 'postalCode', 'type': 'str'}, - 'zip_extended_code': {'key': 'zipExtendedCode', 'type': 'str'}, - 'company_name': {'key': 'companyName', 'type': 'str'}, - 'address_type': {'key': 'addressType', 'type': 'str'}, - } - - def __init__( - self, - *, - street_address1: str, - country: str, - postal_code: str, - street_address2: Optional[str] = None, - street_address3: Optional[str] = None, - city: Optional[str] = None, - state_or_province: Optional[str] = None, - zip_extended_code: Optional[str] = None, - company_name: Optional[str] = None, - address_type: Optional[Union[str, "AddressType"]] = None, - **kwargs - ): - super(ShippingAddress, self).__init__(**kwargs) - self.street_address1 = street_address1 - self.street_address2 = street_address2 - self.street_address3 = street_address3 - self.city = city - self.state_or_province = state_or_province - self.country = country - self.postal_code = postal_code - self.zip_extended_code = zip_extended_code - self.company_name = company_name - self.address_type = address_type - - -class Sku(msrest.serialization.Model): - """The Sku. - - All required parameters must be populated in order to send to Azure. - - :param name: Required. The sku name. Possible values include: "DataBox", "DataBoxDisk", - "DataBoxHeavy". - :type name: str or ~azure.mgmt.databox.models.SkuName - :param display_name: The display name of the sku. - :type display_name: str - :param family: The sku family. - :type family: str - """ - - _validation = { - 'name': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'family': {'key': 'family', 'type': 'str'}, - } - - def __init__( - self, - *, - name: Union[str, "SkuName"], - display_name: Optional[str] = None, - family: Optional[str] = None, - **kwargs - ): - super(Sku, self).__init__(**kwargs) - self.name = name - self.display_name = display_name - self.family = family - - -class SkuAvailabilityValidationRequest(ValidationInputRequest): - """Request to validate sku availability. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. 
Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator - :param device_type: Required. Device type to be used for the job. Possible values include: - "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type device_type: str or ~azure.mgmt.databox.models.SkuName - :ivar transfer_type: Required. Type of the transfer. Default value: "ImportToAzure". - :vartype transfer_type: str - :param country: Required. ISO country code. Country for hardware shipment. For codes check: - https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements. - :type country: str - :param location: Required. Location for data transfer. For locations check: - https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01. - :type location: str - """ - - _validation = { - 'validation_type': {'required': True}, - 'device_type': {'required': True}, - 'transfer_type': {'required': True, 'constant': True}, - 'country': {'required': True}, - 'location': {'required': True}, - } - - _attribute_map = { - 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'device_type': {'key': 'deviceType', 'type': 'str'}, - 'transfer_type': {'key': 'transferType', 'type': 'str'}, - 'country': {'key': 'country', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - } - - transfer_type = "ImportToAzure" - - def __init__( - self, - *, - device_type: Union[str, "SkuName"], - country: str, - location: str, - **kwargs - ): - super(SkuAvailabilityValidationRequest, self).__init__(**kwargs) - self.validation_type = 'ValidateSkuAvailability' # type: str - self.device_type = device_type - self.country = country - self.location = location - - -class SkuAvailabilityValidationResponseProperties(ValidationInputResponse): - """Properties of sku availability validation response. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator - :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error - :ivar status: Sku availability validation status. Possible values include: "Valid", "Invalid", - "Skipped". - :vartype status: str or ~azure.mgmt.databox.models.ValidationStatus - """ - - _validation = { - 'validation_type': {'required': True}, - 'error': {'readonly': True}, - 'status': {'readonly': True}, - } - - _attribute_map = { - 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, - 'status': {'key': 'status', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SkuAvailabilityValidationResponseProperties, self).__init__(**kwargs) - self.validation_type = 'ValidateSkuAvailability' # type: str - self.status = None - - -class SkuCapacity(msrest.serialization.Model): - """Capacity of the sku. 
- - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar usable: Usable capacity in TB. - :vartype usable: str - :ivar maximum: Maximum capacity in TB. - :vartype maximum: str - """ - - _validation = { - 'usable': {'readonly': True}, - 'maximum': {'readonly': True}, - } - - _attribute_map = { - 'usable': {'key': 'usable', 'type': 'str'}, - 'maximum': {'key': 'maximum', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SkuCapacity, self).__init__(**kwargs) - self.usable = None - self.maximum = None - - -class SkuCost(msrest.serialization.Model): - """Describes metadata for retrieving price info. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar meter_id: Meter id of the Sku. - :vartype meter_id: str - :ivar meter_type: The type of the meter. - :vartype meter_type: str - """ - - _validation = { - 'meter_id': {'readonly': True}, - 'meter_type': {'readonly': True}, - } - - _attribute_map = { - 'meter_id': {'key': 'meterId', 'type': 'str'}, - 'meter_type': {'key': 'meterType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SkuCost, self).__init__(**kwargs) - self.meter_id = None - self.meter_type = None - - -class SkuInformation(msrest.serialization.Model): - """Information of the sku. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar sku: The Sku. - :vartype sku: ~azure.mgmt.databox.models.Sku - :ivar enabled: The sku is enabled or not. - :vartype enabled: bool - :ivar destination_to_service_location_map: The map of destination location to service location. - :vartype destination_to_service_location_map: - list[~azure.mgmt.databox.models.DestinationToServiceLocationMap] - :ivar capacity: Capacity of the Sku. - :vartype capacity: ~azure.mgmt.databox.models.SkuCapacity - :ivar costs: Cost of the Sku. - :vartype costs: list[~azure.mgmt.databox.models.SkuCost] - :ivar api_versions: Api versions that support this Sku. - :vartype api_versions: list[str] - :ivar disabled_reason: Reason why the Sku is disabled. Possible values include: "None", - "Country", "Region", "Feature", "OfferType", "NoSubscriptionInfo". - :vartype disabled_reason: str or ~azure.mgmt.databox.models.SkuDisabledReason - :ivar disabled_reason_message: Message for why the Sku is disabled. - :vartype disabled_reason_message: str - :ivar required_feature: Required feature to access the sku. 
- :vartype required_feature: str - """ - - _validation = { - 'sku': {'readonly': True}, - 'enabled': {'readonly': True}, - 'destination_to_service_location_map': {'readonly': True}, - 'capacity': {'readonly': True}, - 'costs': {'readonly': True}, - 'api_versions': {'readonly': True}, - 'disabled_reason': {'readonly': True}, - 'disabled_reason_message': {'readonly': True}, - 'required_feature': {'readonly': True}, - } - - _attribute_map = { - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'enabled': {'key': 'enabled', 'type': 'bool'}, - 'destination_to_service_location_map': {'key': 'properties.destinationToServiceLocationMap', 'type': '[DestinationToServiceLocationMap]'}, - 'capacity': {'key': 'properties.capacity', 'type': 'SkuCapacity'}, - 'costs': {'key': 'properties.costs', 'type': '[SkuCost]'}, - 'api_versions': {'key': 'properties.apiVersions', 'type': '[str]'}, - 'disabled_reason': {'key': 'properties.disabledReason', 'type': 'str'}, - 'disabled_reason_message': {'key': 'properties.disabledReasonMessage', 'type': 'str'}, - 'required_feature': {'key': 'properties.requiredFeature', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SkuInformation, self).__init__(**kwargs) - self.sku = None - self.enabled = None - self.destination_to_service_location_map = None - self.capacity = None - self.costs = None - self.api_versions = None - self.disabled_reason = None - self.disabled_reason_message = None - self.required_feature = None - - -class SubscriptionIsAllowedToCreateJobValidationRequest(ValidationInputRequest): - """Request to validate subscription permission to create jobs. - - All required parameters must be populated in order to send to Azure. - - :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator - """ - - _validation = { - 'validation_type': {'required': True}, - } - - _attribute_map = { - 'validation_type': {'key': 'validationType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SubscriptionIsAllowedToCreateJobValidationRequest, self).__init__(**kwargs) - self.validation_type = 'ValidateSubscriptionIsAllowedToCreateJob' # type: str - - -class SubscriptionIsAllowedToCreateJobValidationResponseProperties(ValidationInputResponse): - """Properties of subscription permission to create job validation response. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param validation_type: Required. Identifies the type of validation response.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator - :ivar error: Error code and message of validation response. - :vartype error: ~azure.mgmt.databox.models.Error - :ivar status: Validation status of subscription permission to create job. Possible values - include: "Valid", "Invalid", "Skipped". 
- :vartype status: str or ~azure.mgmt.databox.models.ValidationStatus - """ - - _validation = { - 'validation_type': {'required': True}, - 'error': {'readonly': True}, - 'status': {'readonly': True}, - } - - _attribute_map = { - 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'error': {'key': 'error', 'type': 'Error'}, - 'status': {'key': 'status', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(SubscriptionIsAllowedToCreateJobValidationResponseProperties, self).__init__(**kwargs) - self.validation_type = 'ValidateSubscriptionIsAllowedToCreateJob' # type: str - self.status = None - - -class TransportAvailabilityDetails(msrest.serialization.Model): - """Transport options availability details for given region. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar shipment_type: Transport Shipment Type supported for given region. Possible values - include: "CustomerManaged", "MicrosoftManaged". - :vartype shipment_type: str or ~azure.mgmt.databox.models.TransportShipmentTypes - """ - - _validation = { - 'shipment_type': {'readonly': True}, - } - - _attribute_map = { - 'shipment_type': {'key': 'shipmentType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(TransportAvailabilityDetails, self).__init__(**kwargs) - self.shipment_type = None - - -class TransportAvailabilityRequest(msrest.serialization.Model): - """Request body to get the transport availability for given sku. - - :param sku_name: Type of the device. Possible values include: "DataBox", "DataBoxDisk", - "DataBoxHeavy". - :type sku_name: str or ~azure.mgmt.databox.models.SkuName - """ - - _attribute_map = { - 'sku_name': {'key': 'skuName', 'type': 'str'}, - } - - def __init__( - self, - *, - sku_name: Optional[Union[str, "SkuName"]] = None, - **kwargs - ): - super(TransportAvailabilityRequest, self).__init__(**kwargs) - self.sku_name = sku_name - - -class TransportAvailabilityResponse(msrest.serialization.Model): - """Transport options available for given sku in a region. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar transport_availability_details: List of transport availability details for given region. - :vartype transport_availability_details: - list[~azure.mgmt.databox.models.TransportAvailabilityDetails] - """ - - _validation = { - 'transport_availability_details': {'readonly': True}, - } - - _attribute_map = { - 'transport_availability_details': {'key': 'transportAvailabilityDetails', 'type': '[TransportAvailabilityDetails]'}, - } - - def __init__( - self, - **kwargs - ): - super(TransportAvailabilityResponse, self).__init__(**kwargs) - self.transport_availability_details = None - - -class TransportPreferences(msrest.serialization.Model): - """Preferences related to the shipment logistics of the sku. - - All required parameters must be populated in order to send to Azure. - - :param preferred_shipment_type: Required. Indicates Shipment Logistics type that the customer - preferred. Possible values include: "CustomerManaged", "MicrosoftManaged". 
- :type preferred_shipment_type: str or ~azure.mgmt.databox.models.TransportShipmentTypes - """ - - _validation = { - 'preferred_shipment_type': {'required': True}, - } - - _attribute_map = { - 'preferred_shipment_type': {'key': 'preferredShipmentType', 'type': 'str'}, - } - - def __init__( - self, - *, - preferred_shipment_type: Union[str, "TransportShipmentTypes"], - **kwargs - ): - super(TransportPreferences, self).__init__(**kwargs) - self.preferred_shipment_type = preferred_shipment_type - - -class UnencryptedCredentials(msrest.serialization.Model): - """Unencrypted credentials for accessing device. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar job_name: Name of the job. - :vartype job_name: str - :ivar job_secrets: Secrets related to this job. - :vartype job_secrets: ~azure.mgmt.databox.models.JobSecrets - """ - - _validation = { - 'job_name': {'readonly': True}, - 'job_secrets': {'readonly': True}, - } - - _attribute_map = { - 'job_name': {'key': 'jobName', 'type': 'str'}, - 'job_secrets': {'key': 'jobSecrets', 'type': 'JobSecrets'}, - } - - def __init__( - self, - **kwargs - ): - super(UnencryptedCredentials, self).__init__(**kwargs) - self.job_name = None - self.job_secrets = None - - -class UnencryptedCredentialsList(msrest.serialization.Model): - """List of unencrypted credentials for accessing device. - - :param value: List of unencrypted credentials. - :type value: list[~azure.mgmt.databox.models.UnencryptedCredentials] - :param next_link: Link for the next set of unencrypted credentials. - :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[UnencryptedCredentials]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - *, - value: Optional[List["UnencryptedCredentials"]] = None, - next_link: Optional[str] = None, - **kwargs - ): - super(UnencryptedCredentialsList, self).__init__(**kwargs) - self.value = value - self.next_link = next_link - - -class UpdateJobDetails(msrest.serialization.Model): - """Job details for update. - - :param contact_details: Contact details for notification and shipping. - :type contact_details: ~azure.mgmt.databox.models.ContactDetails - :param shipping_address: Shipping address of the customer. - :type shipping_address: ~azure.mgmt.databox.models.ShippingAddress - """ - - _attribute_map = { - 'contact_details': {'key': 'contactDetails', 'type': 'ContactDetails'}, - 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, - } - - def __init__( - self, - *, - contact_details: Optional["ContactDetails"] = None, - shipping_address: Optional["ShippingAddress"] = None, - **kwargs - ): - super(UpdateJobDetails, self).__init__(**kwargs) - self.contact_details = contact_details - self.shipping_address = shipping_address - - -class ValidateAddress(ValidationInputRequest): - """The requirements to validate customer address where the device needs to be shipped. - - All required parameters must be populated in order to send to Azure. - - :param validation_type: Required. Identifies the type of validation request.Constant filled by - server. Possible values include: "ValidateAddress", "ValidateDataDestinationDetails", - "ValidateSubscriptionIsAllowedToCreateJob", "ValidatePreferences", "ValidateCreateOrderLimit", - "ValidateSkuAvailability". - :type validation_type: str or ~azure.mgmt.databox.models.ValidationInputDiscriminator - :param shipping_address: Required. Shipping address of the customer. 
- :type shipping_address: ~azure.mgmt.databox.models.ShippingAddress - :param device_type: Required. Device type to be used for the job. Possible values include: - "DataBox", "DataBoxDisk", "DataBoxHeavy". - :type device_type: str or ~azure.mgmt.databox.models.SkuName - :param transport_preferences: Preferences related to the shipment logistics of the sku. - :type transport_preferences: ~azure.mgmt.databox.models.TransportPreferences - """ - - _validation = { - 'validation_type': {'required': True}, - 'shipping_address': {'required': True}, - 'device_type': {'required': True}, - } - - _attribute_map = { - 'validation_type': {'key': 'validationType', 'type': 'str'}, - 'shipping_address': {'key': 'shippingAddress', 'type': 'ShippingAddress'}, - 'device_type': {'key': 'deviceType', 'type': 'str'}, - 'transport_preferences': {'key': 'transportPreferences', 'type': 'TransportPreferences'}, - } - - def __init__( - self, - *, - shipping_address: "ShippingAddress", - device_type: Union[str, "SkuName"], - transport_preferences: Optional["TransportPreferences"] = None, - **kwargs - ): - super(ValidateAddress, self).__init__(**kwargs) - self.validation_type = 'ValidateAddress' # type: str - self.shipping_address = shipping_address - self.device_type = device_type - self.transport_preferences = transport_preferences - - -class ValidationResponse(msrest.serialization.Model): - """Response of pre job creation validations. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar status: Overall validation status. Possible values include: "AllValidToProceed", - "InputsRevisitRequired", "CertainInputValidationsSkipped". - :vartype status: str or ~azure.mgmt.databox.models.OverallValidationStatus - :ivar individual_response_details: List of response details contain validationType and its - response as key and value respectively. - :vartype individual_response_details: list[~azure.mgmt.databox.models.ValidationInputResponse] - """ - - _validation = { - 'status': {'readonly': True}, - 'individual_response_details': {'readonly': True}, - } - - _attribute_map = { - 'status': {'key': 'properties.status', 'type': 'str'}, - 'individual_response_details': {'key': 'properties.individualResponseDetails', 'type': '[ValidationInputResponse]'}, - } - - def __init__( - self, - **kwargs - ): - super(ValidationResponse, self).__init__(**kwargs) - self.status = None - self.individual_response_details = None diff --git a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/py.typed b/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/py.typed deleted file mode 100644 index e5aff4f83af..00000000000 --- a/src/databox/azext_databox/vendored_sdks/databox/v2019_09_01/py.typed +++ /dev/null @@ -1 +0,0 @@ -# Marker file for PEP 561. 
\ No newline at end of file diff --git a/src/databox/report.md b/src/databox/report.md new file mode 100644 index 00000000000..ed3e7819646 --- /dev/null +++ b/src/databox/report.md @@ -0,0 +1,418 @@ +# Azure CLI Module Creation Report + +## EXTENSION +|CLI Extension|Command Groups| +|---------|------------| +|az databox|[groups](#CommandGroups) + +## GROUPS +### Command groups in `az databox` extension +|CLI Command Group|Group Swagger name|Commands| +|---------|------------|--------| +|az databox job|Jobs|[commands](#CommandsInJobs)| +|az databox||[commands](#CommandsIn)| +|az databox service|Service|[commands](#CommandsInService)| + +## COMMANDS +### Commands in `az databox` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az databox mitigate](#Mitigate)|Mitigate|[Parameters](#ParametersMitigate)|[Example](#ExamplesMitigate)| + +### Commands in `az databox job` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az databox job list](#JobsListByResourceGroup)|ListByResourceGroup|[Parameters](#ParametersJobsListByResourceGroup)|[Example](#ExamplesJobsListByResourceGroup)| +|[az databox job list](#JobsList)|List|[Parameters](#ParametersJobsList)|[Example](#ExamplesJobsList)| +|[az databox job show](#JobsGet)|Get|[Parameters](#ParametersJobsGet)|[Example](#ExamplesJobsGet)| +|[az databox job create](#JobsCreate)|Create|[Parameters](#ParametersJobsCreate)|[Example](#ExamplesJobsCreate)| +|[az databox job update](#JobsUpdate)|Update|[Parameters](#ParametersJobsUpdate)|[Example](#ExamplesJobsUpdate)| +|[az databox job delete](#JobsDelete)|Delete|[Parameters](#ParametersJobsDelete)|[Example](#ExamplesJobsDelete)| +|[az databox job book-shipment-pick-up](#JobsBookShipmentPickUp)|BookShipmentPickUp|[Parameters](#ParametersJobsBookShipmentPickUp)|[Example](#ExamplesJobsBookShipmentPickUp)| +|[az databox job cancel](#JobsCancel)|Cancel|[Parameters](#ParametersJobsCancel)|[Example](#ExamplesJobsCancel)| +|[az databox job list-credentials](#JobsListCredentials)|ListCredentials|[Parameters](#ParametersJobsListCredentials)|[Example](#ExamplesJobsListCredentials)| +|[az databox job mark-device-shipped](#JobsMarkDevicesShipped)|MarkDevicesShipped|[Parameters](#ParametersJobsMarkDevicesShipped)|[Example](#ExamplesJobsMarkDevicesShipped)| + +### Commands in `az databox service` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az databox service list-available-sku-by-resource-group](#ServiceListAvailableSkusByResourceGroup)|ListAvailableSkusByResourceGroup|[Parameters](#ParametersServiceListAvailableSkusByResourceGroup)|[Example](#ExamplesServiceListAvailableSkusByResourceGroup)| +|[az databox service region-configuration](#ServiceRegionConfiguration)|RegionConfiguration|[Parameters](#ParametersServiceRegionConfiguration)|[Example](#ExamplesServiceRegionConfiguration)| +|[az databox service region-configuration-by-resource-group](#ServiceRegionConfigurationByResourceGroup)|RegionConfigurationByResourceGroup|[Parameters](#ParametersServiceRegionConfigurationByResourceGroup)|[Example](#ExamplesServiceRegionConfigurationByResourceGroup)| +|[az databox service validate-address](#ServiceValidateAddress)|ValidateAddress|[Parameters](#ParametersServiceValidateAddress)|[Example](#ExamplesServiceValidateAddress)| +|[az databox service 
validate-input](#ServiceValidateInputs)|ValidateInputs|[Parameters](#ParametersServiceValidateInputs)|[Example](#ExamplesServiceValidateInputs)| +|[az databox service validate-input-by-resource-group](#ServiceValidateInputsByResourceGroup)|ValidateInputsByResourceGroup|[Parameters](#ParametersServiceValidateInputsByResourceGroup)|[Example](#ExamplesServiceValidateInputsByResourceGroup)| + + +## COMMAND DETAILS + +### group `az databox` +#### Command `az databox mitigate` + +##### Example +``` +az databox mitigate --job-name "SdkJob8367" --customer-resolution-code "MoveToCleanUpDevice" --resource-group \ +"SdkRg9836" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--job-name**|string|The name of the job Resource within the specified resource group. job names must be between 3 and 24 characters in length and use any alphanumeric and underscore only|job_name|jobName| +|**--resource-group-name**|string|The Resource Group Name|resource_group_name|resourceGroupName| +|**--customer-resolution-code**|sealed-choice|Resolution code for the job|customer_resolution_code|customerResolutionCode| + +### group `az databox job` +#### Command `az databox job list` + +##### Example +``` +az databox job list --resource-group "SdkRg5154" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The Resource Group Name|resource_group_name|resourceGroupName| +|**--skip-token**|string|$skipToken is supported on Get list of jobs, which provides the next page in the list of jobs.|skip_token|$skipToken| + +#### Command `az databox job list` + +##### Example +``` +az databox job list +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +#### Command `az databox job show` + +##### Example +``` +az databox job show --expand "details" --name "SdkJob952" --resource-group "SdkRg5154" +``` +##### Example +``` +az databox job show --expand "details" --name "SdkJob1735" --resource-group "SdkRg7937" +``` +##### Example +``` +az databox job show --expand "details" --name "TJx-637505258985313014" --resource-group "dmstestresource" +``` +##### Example +``` +az databox job show --expand "details" --name "SdkJob6429" --resource-group "SdkRg8091" +``` +##### Example +``` +az databox job show --expand "details" --name "TJx-637505258985313014" --resource-group "dmstestresource" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The Resource Group Name|resource_group_name|resourceGroupName| +|**--job-name**|string|The name of the job Resource within the specified resource group. 
job names must be between 3 and 24 characters in length and use any alphanumeric and underscore only|job_name|jobName| +|**--expand**|string|$expand is supported on details parameter for job, which provides details on the job stages.|expand|$expand| + +#### Command `az databox job create` + +##### Example +``` +az databox job create --name "SdkJob952" --location "westus" --transfer-type "ImportToAzure" --details \ +"{\\"contactDetails\\":{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@microsoft.com\\"],\\"phone\ +\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"},\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountTyp\ +e\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourcegroups/\ +databoxbvt/providers/Microsoft.Storage/storageAccounts/databoxbvttestaccount\\"}}],\\"jobDetailsType\\":\\"DataBox\\",\ +\\"shippingAddress\\":{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\ +\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 \ +TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}" --sku name="DataBox" --resource-group "SdkRg5154" +``` +##### Example +``` +az databox job create --name "SdkJob9640" --location "westus" --transfer-type "ImportToAzure" --details \ +"{\\"contactDetails\\":{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@microsoft.com\\"],\\"phone\ +\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"},\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountTyp\ +e\\":\\"StorageAccount\\",\\"sharePassword\\":\\"\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8\ +ff7-4a25-95c7-ce9da541242f/resourceGroups/databoxbvt1/providers/Microsoft.Storage/storageAccounts/databoxbvttestaccount\ +2\\"}}],\\"devicePassword\\":\\"\\",\\"jobDetailsType\\":\\"DataBox\\",\\"shippingAddress\\":{\\"addres\ +sType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"po\ +stalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND \ +ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}" --sku name="DataBox" --resource-group "SdkRg7478" +``` +##### Example +``` +az databox job create --name "SdkJob6599" --location "westus" --transfer-type "ImportToAzure" --details \ +"{\\"contactDetails\\":{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@microsoft.com\\"],\\"phone\ +\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"},\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountTyp\ +e\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourcegroups/\ +databoxbvt/providers/Microsoft.Storage/storageAccounts/databoxbvttestaccount\\"}}],\\"jobDetailsType\\":\\"DataBox\\",\ +\\"preferences\\":{\\"encryptionPreferences\\":{\\"doubleEncryption\\":\\"Enabled\\"}},\\"shippingAddress\\":{\\"addres\ +sType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"po\ +stalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND \ +ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}" --sku name="DataBox" --resource-group "SdkRg608" +``` +##### Example +``` +az databox job create --name "SdkJob6429" --location "westus" --transfer-type "ExportFromAzure" --details \ +"{\\"contactDetails\\":{\\"contactName\\":\\"Public SDK 
Test\\",\\"emailList\\":[\\"testing@microsoft.com\\"],\\"phone\ +\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"},\\"dataExportDetails\\":[{\\"accountDetails\\":{\\"dataAccountTyp\ +e\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/\ +akvenkat/providers/Microsoft.Storage/storageAccounts/aaaaaa2\\"},\\"transferConfiguration\\":{\\"transferAllDetails\\":\ +{\\"include\\":{\\"dataAccountType\\":\\"StorageAccount\\",\\"transferAllBlobs\\":true,\\"transferAllFiles\\":true}},\\\ +"transferConfigurationType\\":\\"TransferAll\\"}}],\\"jobDetailsType\\":\\"DataBox\\",\\"shippingAddress\\":{\\"address\ +Type\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"pos\ +talCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND \ +ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}" --sku name="DataBox" --resource-group "SdkRg8091" +``` +##### Example +``` +az databox job create --name "SdkJob5337" --type "UserAssigned" --user-assigned-identities \ +"{\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/akvenkat/providers/Microsoft.ManagedIdentity/us\ +erAssignedIdentities/sdkIdentity\\":{}}" --location "westus" --transfer-type "ImportToAzure" --details \ +"{\\"contactDetails\\":{\\"contactName\\":\\"Public SDK Test\\",\\"emailList\\":[\\"testing@microsoft.com\\"],\\"phone\ +\\":\\"1234567890\\",\\"phoneExtension\\":\\"1234\\"},\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountTyp\ +e\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/\ +databoxbvt1/providers/Microsoft.Storage/storageAccounts/databoxbvttestaccount2\\"}}],\\"jobDetailsType\\":\\"DataBox\\"\ +,\\"shippingAddress\\":{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsof\ +t\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 \ +TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit 1\\"}}" --sku name="DataBox" --resource-group "SdkRg7552" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The Resource Group Name|resource_group_name|resourceGroupName| +|**--job-name**|string|The name of the job Resource within the specified resource group. job names must be between 3 and 24 characters in length and use any alphanumeric and underscore only|job_name|jobName| +|**--location**|string|The location of the resource. This will be one of the supported and registered Azure Regions (e.g. West US, East US, Southeast Asia, etc.). The region of a resource cannot be changed once it is created, but if an identical region is specified on update the request will succeed.|location|location| +|**--sku**|object|The sku type.|sku|sku| +|**--transfer-type**|sealed-choice|Type of the data transfer.|transfer_type|transferType| +|**--tags**|dictionary|The list of key value pairs that describe the resource. These tags can be used in viewing and grouping this resource (across resource groups).|tags|tags| +|**--type**|string|Identity type|type|type| +|**--user-assigned-identities**|dictionary|User Assigned Identities|user_assigned_identities|userAssignedIdentities| +|**--details**|object|Details of a job run. 
This field will only be sent for expand details filter.|details|details| +|**--delivery-type**|sealed-choice|Delivery type of Job.|delivery_type|deliveryType| +|**--scheduled-date-time**|date-time|Scheduled date time.|scheduled_date_time|scheduledDateTime| + +#### Command `az databox job update` + +##### Example +``` +az databox job update --name "SdkJob952" --contact-details contact-name="Update Job" email-list="testing@microsoft.com"\ + phone="1234567890" phone-extension="1234" --shipping-address address-type="Commercial" city="San Francisco" \ +company-name="Microsoft" country="US" postal-code="94107" state-or-province="CA" street-address1="16 TOWNSEND ST" \ +street-address2="Unit 1" --resource-group "SdkRg5154" +``` +##### Example +``` +az databox job update --name "SdkJob1735" --key-encryption-key "{\\"kekType\\":\\"CustomerManaged\\",\\"kekUrl\\":\\"ht\ +tps://sdkkeyvault.vault.azure.net/keys/SSDKEY/\\",\\"kekVaultResourceID\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce\ +9da541242f/resourceGroups/akvenkat/providers/Microsoft.KeyVault/vaults/SDKKeyVault\\"}" --resource-group "SdkRg7937" +``` +##### Example +``` +az databox job update --name "SdkJob2965" --type "SystemAssigned,UserAssigned" --user-assigned-identities \ +"{\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/akvenkat/providers/Microsoft.ManagedIdentity/us\ +erAssignedIdentities/sdkIdentity\\":{}}" --key-encryption-key "{\\"identityProperties\\":{\\"type\\":\\"UserAssigned\\"\ +,\\"userAssigned\\":{\\"resourceId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourceGroups/akvenkat/pr\ +oviders/Microsoft.ManagedIdentity/userAssignedIdentities/sdkIdentity\\"}},\\"kekType\\":\\"CustomerManaged\\",\\"kekUrl\ +\\":\\"https://sdkkeyvault.vault.azure.net/keys/SSDKEY/\\",\\"kekVaultResourceID\\":\\"/subscriptions/fa68082f-8ff7-4a2\ +5-95c7-ce9da541242f/resourceGroups/akvenkat/providers/Microsoft.KeyVault/vaults/SDKKeyVault\\"}" --resource-group \ +"SdkRg9765" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The Resource Group Name|resource_group_name|resourceGroupName| +|**--job-name**|string|The name of the job Resource within the specified resource group. job names must be between 3 and 24 characters in length and use any alphanumeric and underscore only|job_name|jobName| +|**--if-match**|string|Defines the If-Match condition. The patch will be performed only if the ETag of the job on the server matches this value.|if_match|If-Match| +|**--tags**|dictionary|The list of key value pairs that describe the resource. These tags can be used in viewing and grouping this resource (across resource groups).|tags|tags| +|**--contact-details**|object|Contact details for notification and shipping.|contact_details|contactDetails| +|**--shipping-address**|object|Shipping address of the customer.|shipping_address|shippingAddress| +|**--key-encryption-key**|object|Key encryption key for the job.|key_encryption_key|keyEncryptionKey| +|**--return-package-details**|object|Return package details of job. 
This is applicable only for customer disk sku|return_package_details|returnPackageDetails|
+|**--type**|string|Identity type|type|type|
+|**--user-assigned-identities**|dictionary|User Assigned Identities|user_assigned_identities|userAssignedIdentities|
+
+#### Command `az databox job delete`
+
+##### Example
+```
+az databox job delete --name "SdkJob952" --resource-group "SdkRg5154"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The Resource Group Name|resource_group_name|resourceGroupName|
+|**--job-name**|string|The name of the job Resource within the specified resource group. job names must be between 3 and 24 characters in length and use any alphanumeric and underscore only|job_name|jobName|
+
+#### Command `az databox job book-shipment-pick-up`
+
+##### Example
+```
+az databox job book-shipment-pick-up --name "TJ-636646322037905056" --resource-group "bvttoolrg6" --end-time \
+"2019-09-22T18:30:00Z" --shipment-location "Front desk" --start-time "2019-09-20T18:30:00Z"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The Resource Group Name|resource_group_name|resourceGroupName|
+|**--job-name**|string|The name of the job Resource within the specified resource group. job names must be between 3 and 24 characters in length and use any alphanumeric and underscore only|job_name|jobName|
+|**--start-time**|date-time|Minimum date after which the pick up should commence; this must be in local time of pick up area.|start_time|startTime|
+|**--end-time**|date-time|Maximum date before which the pick up should commence; this must be in local time of pick up area.|end_time|endTime|
+|**--shipment-location**|string|Shipment Location in the pickup place. E.g. front desk|shipment_location|shipmentLocation|
+
+#### Command `az databox job cancel`
+
+##### Example
+```
+az databox job cancel --reason "CancelTest" --name "SdkJob952" --resource-group "SdkRg5154"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The Resource Group Name|resource_group_name|resourceGroupName|
+|**--job-name**|string|The name of the job Resource within the specified resource group. job names must be between 3 and 24 characters in length and use any alphanumeric and underscore only|job_name|jobName|
+|**--reason**|string|Reason for cancellation.|reason|reason|
+
+#### Command `az databox job list-credentials`
+
+##### Example
+```
+az databox job list-credentials --name "TJ-636646322037905056" --resource-group "bvttoolrg6"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The Resource Group Name|resource_group_name|resourceGroupName|
+|**--job-name**|string|The name of the job Resource within the specified resource group. 
job names must be between 3 and 24 characters in length and use any alphanumeric and underscore only|job_name|jobName| + +#### Command `az databox job mark-device-shipped` + +##### Example +``` +az databox job mark-device-shipped --name "SdkJob8367" --delivery-package-details carrier-name="DHL" \ +tracking-id="123456" --resource-group "SdkRg9836" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--job-name**|string|The name of the job Resource within the specified resource group. job names must be between 3 and 24 characters in length and use any alphanumeric and underscore only|job_name|jobName| +|**--resource-group-name**|string|The Resource Group Name|resource_group_name|resourceGroupName| +|**--delivery-package-details**|object|Delivery package details|delivery_package_details|deliveryPackageDetails| + +### group `az databox service` +#### Command `az databox service list-available-sku-by-resource-group` + +##### Example +``` +az databox service list-available-sku-by-resource-group --country "US" --available-sku-request-location "westus" \ +--transfer-type "ImportToAzure" --location "westus" --resource-group "bvttoolrg6" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The Resource Group Name|resource_group_name|resourceGroupName| +|**--location**|string|The location of the resource|location|location| +|**--transfer-type**|sealed-choice|Type of the transfer.|transfer_type|transferType| +|**--country**|string|ISO country code. Country for hardware shipment. For codes check: https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements|country|country| +|**--available-sku-request-location**|string|Location for data transfer. 
For locations check: https://management.azure.com/subscriptions/SUBSCRIPTIONID/locations?api-version=2018-01-01|available_sku_request_location|location|
+|**--sku-names**|array|Sku Names to filter for available skus|sku_names|skuNames|
+
+#### Command `az databox service region-configuration`
+
+##### Example
+```
+az databox service region-configuration --location "westus" --schedule-availability-request \
+"{\\"skuName\\":\\"DataBox\\",\\"storageLocation\\":\\"westus\\"}"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location of the resource|location|location|
+|**--data-box-schedule-availability-request**|object|Request body to get the availability for scheduling data box orders.|data_box_schedule_availability_request|DataBoxScheduleAvailabilityRequest|
+|**--disk-schedule-availability-request**|object|Request body to get the availability for scheduling disk orders.|disk_schedule_availability_request|DiskScheduleAvailabilityRequest|
+|**--heavy-schedule-availability-request**|object|Request body to get the availability for scheduling heavy orders.|heavy_schedule_availability_request|HeavyScheduleAvailabilityRequest|
+|**--datacenter-address-request**|object|Request body to get the datacenter address.|datacenter_address_request|datacenterAddressRequest|
+|**--sku-name**|sealed-choice|Type of the device.|sku_name|skuName|
+
+#### Command `az databox service region-configuration-by-resource-group`
+
+##### Example
+```
+az databox service region-configuration-by-resource-group --location "westus" --schedule-availability-request \
+"{\\"skuName\\":\\"DataBox\\",\\"storageLocation\\":\\"westus\\"}" --resource-group "SdkRg4981"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The Resource Group Name|resource_group_name|resourceGroupName|
+|**--location**|string|The location of the resource|location|location|
+|**--data-box-schedule-availability-request**|object|Request body to get the availability for scheduling data box orders.|data_box_schedule_availability_request|DataBoxScheduleAvailabilityRequest|
+|**--disk-schedule-availability-request**|object|Request body to get the availability for scheduling disk orders.|disk_schedule_availability_request|DiskScheduleAvailabilityRequest|
+|**--heavy-schedule-availability-request**|object|Request body to get the availability for scheduling heavy orders.|heavy_schedule_availability_request|HeavyScheduleAvailabilityRequest|
+|**--datacenter-address-request**|object|Request body to get the datacenter address.|datacenter_address_request|datacenterAddressRequest|
+|**--sku-name**|sealed-choice|Type of the device.|sku_name|skuName|
+
+#### Command `az databox service validate-address`
+
+##### Example
+```
+az databox service validate-address --location "westus" --device-type "DataBox" --shipping-address \
+address-type="Commercial" city="San Francisco" company-name="Microsoft" country="US" postal-code="94107" \
+state-or-province="CA" street-address1="16 TOWNSEND ST" street-address2="Unit 1" --validation-type "ValidateAddress"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location of the resource|location|location|
+|**--validation-type**|sealed-choice|Identifies the type of validation 
request.|validation_type|validationType| +|**--shipping-address**|object|Shipping address of the customer.|shipping_address|shippingAddress| +|**--device-type**|sealed-choice|Device type to be used for the job.|device_type|deviceType| +|**--preferred-shipment-type**|sealed-choice|Indicates Shipment Logistics type that the customer preferred.|preferred_shipment_type|preferredShipmentType| + +#### Command `az databox service validate-input` + +##### Example +``` +az databox service validate-input --location "westus" --validation-request "{\\"individualRequestDetails\\":[{\\"dataIm\ +portDetails\\":[{\\"accountDetails\\":{\\"dataAccountType\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscripti\ +ons/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourcegroups/databoxbvt/providers/Microsoft.Storage/storageAccounts/databoxb\ +vttestaccount\\"}}],\\"deviceType\\":\\"DataBox\\",\\"transferType\\":\\"ImportToAzure\\",\\"validationType\\":\\"Valid\ +ateDataTransferDetails\\"},{\\"deviceType\\":\\"DataBox\\",\\"shippingAddress\\":{\\"addressType\\":\\"Commercial\\",\\\ +"city\\":\\"San Francisco\\",\\"companyName\\":\\"Microsoft\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"s\ +tateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit \ +1\\"},\\"transportPreferences\\":{\\"preferredShipmentType\\":\\"MicrosoftManaged\\"},\\"validationType\\":\\"ValidateA\ +ddress\\"},{\\"validationType\\":\\"ValidateSubscriptionIsAllowedToCreateJob\\"},{\\"country\\":\\"US\\",\\"deviceType\ +\\":\\"DataBox\\",\\"location\\":\\"westus\\",\\"transferType\\":\\"ImportToAzure\\",\\"validationType\\":\\"ValidateSk\ +uAvailability\\"},{\\"deviceType\\":\\"DataBox\\",\\"validationType\\":\\"ValidateCreateOrderLimit\\"},{\\"deviceType\\\ +":\\"DataBox\\",\\"preference\\":{\\"transportPreferences\\":{\\"preferredShipmentType\\":\\"MicrosoftManaged\\"}},\\"v\ +alidationType\\":\\"ValidatePreferences\\"}],\\"validationCategory\\":\\"JobCreationValidation\\"}" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--location**|string|The location of the resource|location|location| +|**--create-job-validations**|object|It does all pre-job creation validations.|create_job_validations|CreateJobValidations| + +#### Command `az databox service validate-input-by-resource-group` + +##### Example +``` +az databox service validate-input-by-resource-group --location "westus" --resource-group "SdkRg6861" \ +--validation-request "{\\"individualRequestDetails\\":[{\\"dataImportDetails\\":[{\\"accountDetails\\":{\\"dataAccountT\ +ype\\":\\"StorageAccount\\",\\"storageAccountId\\":\\"/subscriptions/fa68082f-8ff7-4a25-95c7-ce9da541242f/resourcegroup\ +s/databoxbvt/providers/Microsoft.Storage/storageAccounts/databoxbvttestaccount\\"}}],\\"deviceType\\":\\"DataBox\\",\\"\ +transferType\\":\\"ImportToAzure\\",\\"validationType\\":\\"ValidateDataTransferDetails\\"},{\\"deviceType\\":\\"DataBo\ +x\\",\\"shippingAddress\\":{\\"addressType\\":\\"Commercial\\",\\"city\\":\\"San Francisco\\",\\"companyName\\":\\"Micr\ +osoft\\",\\"country\\":\\"US\\",\\"postalCode\\":\\"94107\\",\\"stateOrProvince\\":\\"CA\\",\\"streetAddress1\\":\\"16 \ +TOWNSEND ST\\",\\"streetAddress2\\":\\"Unit 1\\"},\\"transportPreferences\\":{\\"preferredShipmentType\\":\\"MicrosoftM\ +anaged\\"},\\"validationType\\":\\"ValidateAddress\\"},{\\"validationType\\":\\"ValidateSubscriptionIsAllowedToCreateJo\ 
+b\\"},{\\"country\\":\\"US\\",\\"deviceType\\":\\"DataBox\\",\\"location\\":\\"westus\\",\\"transferType\\":\\"ImportTo\ +Azure\\",\\"validationType\\":\\"ValidateSkuAvailability\\"},{\\"deviceType\\":\\"DataBox\\",\\"validationType\\":\\"Va\ +lidateCreateOrderLimit\\"},{\\"deviceType\\":\\"DataBox\\",\\"preference\\":{\\"transportPreferences\\":{\\"preferredSh\ +ipmentType\\":\\"MicrosoftManaged\\"}},\\"validationType\\":\\"ValidatePreferences\\"}],\\"validationCategory\\":\\"Job\ +CreationValidation\\"}" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The Resource Group Name|resource_group_name|resourceGroupName| +|**--location**|string|The location of the resource|location|location| +|**--create-job-validations**|object|It does all pre-job creation validations.|create_job_validations|CreateJobValidations| diff --git a/src/databox/setup.cfg b/src/databox/setup.cfg index 3c6e79cf31d..2fdd96e5d39 100644 --- a/src/databox/setup.cfg +++ b/src/databox/setup.cfg @@ -1,2 +1 @@ -[bdist_wheel] -universal=1 +#setup.cfg \ No newline at end of file diff --git a/src/databox/setup.py b/src/databox/setup.py index 065d917f533..ed1ec325aa7 100644 --- a/src/databox/setup.py +++ b/src/databox/setup.py @@ -8,15 +8,13 @@ from codecs import open from setuptools import setup, find_packages -try: - from azure_bdist_wheel import cmdclass -except ImportError: - from distutils import log as logger - logger.warn("Wheel is not available, disabling bdist_wheel hook") -# TODO: Confirm this is the right version number you want and it matches your # HISTORY.rst entry. -VERSION = '0.1.3' +VERSION = '0.1.0' +try: + from azext_databox.manual.version import VERSION +except ImportError: + pass # The full list of classifiers is available at # https://pypi.python.org/pypi?%3Aaction=list_classifiers @@ -26,17 +24,19 @@ 'Intended Audience :: System Administrators', 'Programming Language :: Python', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', - 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'License :: OSI Approved :: MIT License', ] -# TODO: Add any additional SDK dependencies here DEPENDENCIES = [] +try: + from azext_databox.manual.dependency import DEPENDENCIES +except ImportError: + pass + with open('README.md', 'r', encoding='utf-8') as f: README = f.read() with open('HISTORY.rst', 'r', encoding='utf-8') as f: @@ -45,8 +45,7 @@ setup( name='databox', version=VERSION, - description='Microsoft Azure Command-Line Tools DataBox Extension', - # TODO: Update author and email, if applicable + description='Microsoft Azure Command-Line Tools DataBoxManagementClient Extension', author='Microsoft Corporation', author_email='azpycli@microsoft.com', url='https://github.com/Azure/azure-cli-extensions/tree/master/src/databox',
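The rewritten setup.py above resolves VERSION and DEPENDENCIES in two steps: it assigns the generated defaults ('0.1.0' and an empty dependency list), then tries to import overrides from the optional azext_databox.manual package and silently falls back when those modules are absent, so regenerating the defaults never clobbers hand-maintained values. Neither override module is part of this diff; the sketch below is only an illustration, under the assumption that an extension author adds them later, of what the two files could contain for those try/except imports to pick up. The version string and the pinned dependency are placeholders, not values taken from this change.

```
# Hypothetical override modules; neither file exists in this diff.

# azext_databox/manual/version.py
# If present, setup.py's try/except import uses this VERSION instead of the
# generated default '0.1.0'.
VERSION = '0.1.1'

# azext_databox/manual/dependency.py
# If present, setup.py's try/except import uses this DEPENDENCIES list instead
# of the generated empty list.
DEPENDENCIES = [
    'azure-mgmt-core>=1.2.0',  # illustrative runtime requirement only
]
```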